Mirror of https://github.com/privatevoid-net/nix-super.git (synced 2024-11-26 07:46:21 +02:00)

Commit fd78c9b000: Merge remote-tracking branch 'nixos/master'

560 changed files with 18557 additions and 12083 deletions
.clang-format — new file (30 lines)
@@ -0,0 +1,30 @@
+BasedOnStyle: LLVM
+IndentWidth: 4
+BreakBeforeBraces: Custom
+BraceWrapping:
+    AfterStruct: true
+    AfterClass: true
+    AfterFunction: true
+    AfterUnion: true
+    SplitEmptyRecord: false
+PointerAlignment: Middle
+FixNamespaceComments: false
+SortIncludes: Never
+#IndentPPDirectives: BeforeHash
+SpaceAfterCStyleCast: true
+SpaceAfterTemplateKeyword: false
+AccessModifierOffset: -4
+AlignAfterOpenBracket: AlwaysBreak
+AlignEscapedNewlines: DontAlign
+ColumnLimit: 120
+BreakStringLiterals: false
+BitFieldColonSpacing: None
+AllowShortFunctionsOnASingleLine: Empty
+AlwaysBreakTemplateDeclarations: Yes
+BinPackParameters: false
+BreakConstructorInitializers: BeforeComma
+EmptyLineAfterAccessModifier: Leave # change to always/never later?
+EmptyLineBeforeAccessModifier: Leave
+#PackConstructorInitializers: BinPack
+BreakBeforeBinaryOperators: NonAssignment
+AlwaysBreakBeforeMultilineStrings: true
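Once this file sits at the repository root, clang-format picks it up through its `file` style lookup. A minimal sketch of reformatting one source file in place (the file path is only an illustration, not something this commit formats):

```console
$ clang-format --style=file -i src/libutil/url.cc
```

`--style=file` makes clang-format search upward from the input file for this `.clang-format`.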
.clang-tidy — new file (3 lines)
@@ -0,0 +1,3 @@
+# We use pointers to aggregates in a couple of places, intentionally.
+# void * would look weird.
+Checks: '-bugprone-sizeof-expression'
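clang-tidy reads the nearest `.clang-tidy` automatically; it only needs a compilation database, which the `.gitignore` changes further down already anticipate (`compile_commands.json`). A hedged example invocation, with an arbitrary source file standing in for a real target:

```console
$ clang-tidy -p . src/libstore/store-api.cc
```

Here `-p .` points at the directory containing `compile_commands.json`.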
.github/CODEOWNERS (vendored) — 10 changed lines
@@ -10,16 +10,8 @@
 # This file
 .github/CODEOWNERS @edolstra

-# Public documentation
-/doc @fricklerhandwerk
-*.md @fricklerhandwerk

 # Documentation of built-in functions
-src/libexpr/primops.cc @fricklerhandwerk @roberth
+src/libexpr/primops.cc @roberth
-# Documentation on experimental features
-src/libutil/experimental-features.cc @fricklerhandwerk
-# Documentation on configuration settings
-src/libstore/globals.hh @fricklerhandwerk

 # Libstore layer
 /src/libstore @thufschmitt
.github/PULL_REQUEST_TEMPLATE.md (vendored) — 4 changed lines
@@ -10,6 +10,8 @@

 <!-- Large change: Provide instructions to reviewers how to read the diff. -->

-# Priorities
+# Priorities and Process

 Add :+1: to [pull requests you find important](https://github.com/NixOS/nix/pulls?q=is%3Aopen+sort%3Areactions-%2B1-desc).
+
+The Nix maintainer team uses a [GitHub project board](https://github.com/orgs/NixOS/projects/19) to [schedule and track reviews](https://github.com/NixOS/nix/tree/master/maintainers#project-board-protocol).
.github/labeler.yml (vendored) — 27 changed lines
@@ -1,23 +1,30 @@
 "documentation":
-  - doc/manual/*
-  - src/nix/**/*.md
+  - changed-files:
+    - any-glob-to-any-file: "doc/manual/*"
+    - any-glob-to-any-file: "src/nix/**/*.md"

 "store":
-  - src/libstore/store-api.*
-  - src/libstore/*-store.*
+  - changed-files:
+    - any-glob-to-any-file: "src/libstore/store-api.*"
+    - any-glob-to-any-file: "src/libstore/*-store.*"

 "fetching":
-  - src/libfetchers/**/*
+  - changed-files:
+    - any-glob-to-any-file: "src/libfetchers/**/*"

 "repl":
-  - src/libcmd/repl.*
-  - src/nix/repl.*
+  - changed-files:
+    - any-glob-to-any-file: "src/libcmd/repl.*"
+    - any-glob-to-any-file: "src/nix/repl.*"

 "new-cli":
-  - src/nix/**/*
+  - changed-files:
+    - any-glob-to-any-file: "src/nix/**/*"

 "with-tests":
+  - changed-files:
     # Unit tests
-  - src/*/tests/**/*
+    - any-glob-to-any-file: "src/*/tests/**/*"
     # Functional and integration tests
-  - tests/functional/**/*
+    - any-glob-to-any-file: "tests/functional/**/*"
.github/workflows/backport.yml (vendored) — 2 changed lines
@@ -21,7 +21,7 @@ jobs:
 fetch-depth: 0
 - name: Create backport PRs
 # should be kept in sync with `version`
-uses: zeebe-io/backport-action@v2.2.0
+uses: zeebe-io/backport-action@v2.4.1
 with:
 # Config README: https://github.com/zeebe-io/backport-action#backport-action
 github_token: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/ci.yml (vendored) — 23 changed lines
@@ -20,12 +20,12 @@ jobs:
 - uses: actions/checkout@v4
 with:
 fetch-depth: 0
-- uses: cachix/install-nix-action@v24
+- uses: cachix/install-nix-action@v26
 with:
 # The sandbox would otherwise be disabled by default on Darwin
 extra_nix_config: "sandbox = true"
 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-- uses: cachix/cachix-action@v13
+- uses: cachix/cachix-action@v14
 if: needs.check_secrets.outputs.cachix == 'true'
 with:
 name: '${{ env.CACHIX_NAME }}'
@@ -62,10 +62,10 @@ jobs:
 with:
 fetch-depth: 0
 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-- uses: cachix/install-nix-action@v24
+- uses: cachix/install-nix-action@v26
 with:
-install_url: https://releases.nixos.org/nix/nix-2.13.3/install
+install_url: https://releases.nixos.org/nix/nix-2.20.3/install
-- uses: cachix/cachix-action@v13
+- uses: cachix/cachix-action@v14
 with:
 name: '${{ env.CACHIX_NAME }}'
 signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
@@ -84,7 +84,7 @@ jobs:
 steps:
 - uses: actions/checkout@v4
 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
-- uses: cachix/install-nix-action@v24
+- uses: cachix/install-nix-action@v26
 with:
 install_url: '${{needs.installer.outputs.installerURL}}'
 install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
@@ -114,12 +114,12 @@ jobs:
 - uses: actions/checkout@v4
 with:
 fetch-depth: 0
-- uses: cachix/install-nix-action@v24
+- uses: cachix/install-nix-action@v26
 with:
-install_url: https://releases.nixos.org/nix/nix-2.13.3/install
+install_url: https://releases.nixos.org/nix/nix-2.20.3/install
 - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
 - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
-- uses: cachix/cachix-action@v13
+- uses: cachix/cachix-action@v14
 if: needs.check_secrets.outputs.cachix == 'true'
 with:
 name: '${{ env.CACHIX_NAME }}'
@@ -153,6 +153,9 @@ jobs:
 IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')

 docker tag nix:$NIX_VERSION $IMAGE_ID:$NIX_VERSION
-docker tag nix:$NIX_VERSION $IMAGE_ID:master
+docker tag nix:$NIX_VERSION $IMAGE_ID:latest
 docker push $IMAGE_ID:$NIX_VERSION
+docker push $IMAGE_ID:latest
+# deprecated 2024-02-24
+docker tag nix:$NIX_VERSION $IMAGE_ID:master
 docker push $IMAGE_ID:master
.gitignore (vendored) — 7 changed lines
@@ -10,6 +10,7 @@ perl/Makefile.config
 /stamp-h1
 /svn-revision
 /libtool
+/config/config.*

 # /doc/manual/
 /doc/manual/*.1
@@ -45,13 +46,16 @@ perl/Makefile.config
 /src/libexpr/parser-tab.hh
 /src/libexpr/parser-tab.output
 /src/libexpr/nix.tbl
+/src/libexpr/tests
 /tests/unit/libexpr/libnixexpr-tests

 # /src/libstore/
 *.gen.*
+/src/libstore/tests
 /tests/unit/libstore/libnixstore-tests

 # /src/libutil/
+/src/libutil/tests
 /tests/unit/libutil/libnixutil-tests

 /src/nix/nix
@@ -94,6 +98,7 @@ perl/Makefile.config
 /tests/functional/ca/config.nix
 /tests/functional/dyn-drv/config.nix
 /tests/functional/repl-result-out
+/tests/functional/debugger-test-out
 /tests/functional/test-libstoreconsumer/test-libstoreconsumer

 # /tests/functional/lang/
@@ -137,10 +142,12 @@ GTAGS

 # auto-generated compilation database
 compile_commands.json
+*.compile_commands.json

 nix-rust/target

 result
+result-*

 # IDE
 .vscode/
.version — 2 changed lines
@@ -1 +1 @@
-2.20.0
+2.22.0
@@ -63,11 +63,11 @@ Check out the [security policy](https://github.com/NixOS/nix/security/policy).
 - Functional tests – [`tests/functional/**.sh`](./tests/functional)
 - Unit tests – [`src/*/tests`](./src/)
 - Integration tests – [`tests/nixos/*`](./tests/nixos)
-- [ ] User documentation in the [manual](..doc/manual/src)
+- [ ] User documentation in the [manual](./doc/manual/src)
 - [ ] API documentation in header files
 - [ ] Code and comments are self-explanatory
 - [ ] Commit message explains **why** the change was made
-- [ ] New feature or incompatible change: updated [release notes](./doc/manual/src/release-notes/rl-next.md)
+- [ ] New feature or incompatible change: [add a release note](https://nixos.org/manual/nix/stable/contributing/hacking#add-a-release-note)

 7. If you need additional feedback or help to getting pull request into shape, ask other contributors using [@mentions](https://docs.github.com/en/get-started/writing-on-github/getting-started-with-writing-and-formatting-on-github/basic-writing-and-formatting-syntax#mentioning-people-and-teams).
Makefile — 80 changed lines
@@ -1,8 +1,12 @@
+# External build directory support
+
 include mk/build-dir.mk

 -include $(buildprefix)Makefile.config
 clean-files += $(buildprefix)Makefile.config

+# List makefiles
+
 ifeq ($(ENABLE_BUILD), yes)
 makefiles = \
 mk/precompiled-headers.mk \
@@ -24,7 +28,7 @@ makefiles = \
 misc/upstart/local.mk
 endif

-ifeq ($(ENABLE_BUILD)_$(ENABLE_TESTS), yes_yes)
+ifeq ($(ENABLE_UNIT_TESTS), yes)
 makefiles += \
 tests/unit/libutil/local.mk \
 tests/unit/libutil-support/local.mk \
@@ -34,18 +38,29 @@ makefiles += \
 tests/unit/libexpr-support/local.mk
 endif

-ifeq ($(ENABLE_TESTS), yes)
+ifeq ($(ENABLE_FUNCTIONAL_TESTS), yes)
 makefiles += \
 tests/functional/local.mk \
 tests/functional/ca/local.mk \
+tests/functional/git-hashing/local.mk \
 tests/functional/dyn-drv/local.mk \
 tests/functional/test-libstoreconsumer/local.mk \
 tests/functional/plugins/local.mk
-else
-makefiles += \
-mk/disable-tests.mk
 endif

+# Some makefiles require access to built programs and must be included late.
+makefiles-late =
+
+ifeq ($(ENABLE_DOC_GEN), yes)
+makefiles-late += doc/manual/local.mk
+endif
+
+ifeq ($(ENABLE_INTERNAL_API_DOCS), yes)
+makefiles-late += doc/internal-api/local.mk
+endif
+
+# Miscellaneous global Flags
+
 OPTIMIZE = 1

 ifeq ($(OPTIMIZE), 1)
@@ -53,15 +68,58 @@ ifeq ($(OPTIMIZE), 1)
 GLOBAL_LDFLAGS += $(CXXLTO)
 else
 GLOBAL_CXXFLAGS += -O0 -U_FORTIFY_SOURCE
+unexport NIX_HARDENING_ENABLE
 endif

+include mk/platform.mk
+
+ifdef HOST_WINDOWS
+# Windows DLLs are stricter about symbol visibility than Unix shared
+# objects --- see https://gcc.gnu.org/wiki/Visibility for details.
+# This is a temporary sledgehammer to export everything like on Unix,
+# and not detail with this yet.
+#
+# TODO do not do this, and instead do fine-grained export annotations.
+GLOBAL_LDFLAGS += -Wl,--export-all-symbols
+endif
+
+GLOBAL_CXXFLAGS += -g -Wall -Wimplicit-fallthrough -include $(buildprefix)config.h -std=c++2a -I src
+
+# Include the main lib, causing rules to be defined
+
 include mk/lib.mk

-# Must be included after `mk/lib.mk` so rules refer to variables defined
-# by the library. Rules are not "lazy" like variables, unfortunately.
-ifeq ($(ENABLE_BUILD), yes)
-$(eval $(call include-sub-makefile, doc/manual/local.mk))
-$(eval $(call include-sub-makefile, doc/internal-api/local.mk))
+# Fallback stub rules for better UX when things are disabled
+#
+# These must be defined after `mk/lib.mk`. Otherwise the first rule
+# incorrectly becomes the default target.
+
+ifneq ($(ENABLE_UNIT_TESTS), yes)
+.PHONY: check
+check:
+	@echo "Unit tests are disabled. Configure without '--disable-unit-tests', or avoid calling 'make check'."
+	@exit 1
 endif

-GLOBAL_CXXFLAGS += -g -Wall -include config.h -std=c++2a -I src
+ifneq ($(ENABLE_FUNCTIONAL_TESTS), yes)
+.PHONY: installcheck
+installcheck:
+	@echo "Functional tests are disabled. Configure without '--disable-functional-tests', or avoid calling 'make installcheck'."
+	@exit 1
+endif
+
+# Documentation fallback stub rules.
+
+ifneq ($(ENABLE_DOC_GEN), yes)
+.PHONY: manual-html manpages
+manual-html manpages:
+	@echo "Generated docs are disabled. Configure without '--disable-doc-gen', or avoid calling 'make manpages' and 'make manual-html'."
+	@exit 1
+endif
+
+ifneq ($(ENABLE_INTERNAL_API_DOCS), yes)
+.PHONY: internal-api-html
+internal-api-html:
+	@echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'."
+	@exit 1
+endif
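Taken together with the configure.ac changes further down, these stub rules make a tree configured without a feature fail fast with a hint instead of half-working. A hedged sketch of exercising the targets named in the stubs, using only the configure flags introduced in this diff (the particular combination is illustrative):

```console
$ ./configure --disable-functional-tests --disable-doc-gen
$ make check              # unit tests: still enabled here, so this runs them
$ make installcheck       # functional tests: disabled above, prints the stub message and exits 1
$ make internal-api-html  # only works if configured with --enable-internal-api-docs
```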
@@ -9,8 +9,11 @@ CXXFLAGS = @CXXFLAGS@
 CXXLTO = @CXXLTO@
 EDITLINE_LIBS = @EDITLINE_LIBS@
 ENABLE_BUILD = @ENABLE_BUILD@
+ENABLE_DOC_GEN = @ENABLE_DOC_GEN@
+ENABLE_FUNCTIONAL_TESTS = @ENABLE_FUNCTIONAL_TESTS@
+ENABLE_INTERNAL_API_DOCS = @ENABLE_INTERNAL_API_DOCS@
 ENABLE_S3 = @ENABLE_S3@
-ENABLE_TESTS = @ENABLE_TESTS@
+ENABLE_UNIT_TESTS = @ENABLE_UNIT_TESTS@
 GTEST_LIBS = @GTEST_LIBS@
 HAVE_LIBCPUID = @HAVE_LIBCPUID@
 HAVE_SECCOMP = @HAVE_SECCOMP@
@@ -26,7 +29,6 @@ LOWDOWN_LIBS = @LOWDOWN_LIBS@
 OPENSSL_LIBS = @OPENSSL_LIBS@
 PACKAGE_NAME = @PACKAGE_NAME@
 PACKAGE_VERSION = @PACKAGE_VERSION@
-RAPIDCHECK_HEADERS = @RAPIDCHECK_HEADERS@
 SHELL = @bash@
 SODIUM_LIBS = @SODIUM_LIBS@
 SQLITE3_LIBS = @SQLITE3_LIBS@
@@ -36,12 +38,10 @@ checkbindir = @checkbindir@
 checklibdir = @checklibdir@
 datadir = @datadir@
 datarootdir = @datarootdir@
-doc_generate = @doc_generate@
 docdir = @docdir@
 embedded_sandbox_shell = @embedded_sandbox_shell@
 exec_prefix = @exec_prefix@
 includedir = @includedir@
-internal_api_docs = @internal_api_docs@
 libdir = @libdir@
 libexecdir = @libexecdir@
 localstatedir = @localstatedir@
config/config.guess (vendored) — 1700 changed lines (diff not shown: file too large)
config/config.sub (vendored) — 1860 changed lines (diff not shown: file too large)
configure.ac — 149 changed lines
@@ -47,6 +47,10 @@ AC_DEFINE_UNQUOTED(SYSTEM, ["$system"], [platform identifier ('cpu-os')])
 # State should be stored in /nix/var, unless the user overrides it explicitly.
 test "$localstatedir" = '${prefix}/var' && localstatedir=/nix/var

+# Assign a default value to C{,XX}FLAGS as the default configure script sets them
+# to -O2 otherwise, which we don't want to have hardcoded
+CFLAGS=${CFLAGS-""}
+CXXFLAGS=${CXXFLAGS-""}
+
 AC_PROG_CC
 AC_PROG_CXX
@@ -122,7 +126,6 @@ AC_PATH_PROG(flex, flex, false)
 AC_PATH_PROG(bison, bison, false)
 AC_PATH_PROG(dot, dot)
 AC_PATH_PROG(lsof, lsof, lsof)
-NEED_PROG(jq, jq)

 AC_SUBST(coreutils, [$(dirname $(type -p cat))])
@@ -133,6 +136,48 @@ AC_ARG_WITH(store-dir, AS_HELP_STRING([--with-store-dir=PATH],[path of the Nix s
 AC_SUBST(storedir)

+# Running the functional tests without building Nix is useful for testing
+# different pre-built versions of Nix against each other.
+AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]),
+  ENABLE_BUILD=$enableval, ENABLE_BUILD=yes)
+AC_SUBST(ENABLE_BUILD)
+
+# Building without unit tests is useful for bootstrapping with a smaller footprint
+# or running the tests in a separate derivation. Otherwise, we do compile and
+# run them.
+
+AC_ARG_ENABLE(unit-tests, AS_HELP_STRING([--disable-unit-tests],[Do not build the tests]),
+  ENABLE_UNIT_TESTS=$enableval, ENABLE_UNIT_TESTS=$ENABLE_BUILD)
+AC_SUBST(ENABLE_UNIT_TESTS)
+
+AS_IF(
+  [test "$ENABLE_BUILD" == "no" && test "$ENABLE_UNIT_TESTS" == "yes"],
+  [AC_MSG_ERROR([Cannot enable unit tests when building overall is disabled. Please do not pass '--enable-unit-tests' or do not pass '--disable-build'.])])
+
+AC_ARG_ENABLE(functional-tests, AS_HELP_STRING([--disable-functional-tests],[Do not build the tests]),
+  ENABLE_FUNCTIONAL_TESTS=$enableval, ENABLE_FUNCTIONAL_TESTS=yes)
+AC_SUBST(ENABLE_FUNCTIONAL_TESTS)
+
+# documentation generation switch
+AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]),
+  ENABLE_DOC_GEN=$enableval, ENABLE_DOC_GEN=$ENABLE_BUILD)
+AC_SUBST(ENABLE_DOC_GEN)
+
+AS_IF(
+  [test "$ENABLE_BUILD" == "no" && test "$ENABLE_DOC_GEN" == "yes"],
+  [AC_MSG_ERROR([Cannot enable generated docs when building overall is disabled. Please do not pass '--enable-doc-gen' or do not pass '--disable-build'.])])
+
+# Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
+AC_ARG_ENABLE(internal-api-docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),
+  ENABLE_INTERNAL_API_DOCS=$enableval, ENABLE_INTERNAL_API_DOCS=no)
+AC_SUBST(ENABLE_INTERNAL_API_DOCS)
+
+AS_IF(
+  [test "$ENABLE_FUNCTIONAL_TESTS" == "yes" || test "$ENABLE_DOC_GEN" == "yes"],
+  [NEED_PROG(jq, jq)])
+
+AS_IF([test "$ENABLE_BUILD" == "yes"],[
+
 # Look for boost, a required dependency.
 # Note that AX_BOOST_BASE only exports *CPP* BOOST_CPPFLAGS, no CXX flags,
 # and CPPFLAGS are not passed to the C++ compiler automatically.
@@ -155,18 +200,6 @@ if test "x$GCC_ATOMIC_BUILTINS_NEED_LIBATOMIC" = xyes; then
 LDFLAGS="-latomic $LDFLAGS"
 fi

-# Running the functional tests without building Nix is useful for testing
-# different pre-built versions of Nix against each other.
-AC_ARG_ENABLE(build, AS_HELP_STRING([--disable-build],[Do not build nix]),
-  ENABLE_BUILD=$enableval, ENABLE_BUILD=yes)
-AC_SUBST(ENABLE_BUILD)
-# Building without tests is useful for bootstrapping with a smaller footprint
-# or running the tests in a separate derivation. Otherwise, we do compile and
-# run them.
-AC_ARG_ENABLE(tests, AS_HELP_STRING([--disable-tests],[Do not build the tests]),
-  ENABLE_TESTS=$enableval, ENABLE_TESTS=yes)
-AC_SUBST(ENABLE_TESTS)

 AC_ARG_ENABLE(install-unit-tests, AS_HELP_STRING([--enable-install-unit-tests],[Install the unit tests for running later (default no)]),
   INSTALL_UNIT_TESTS=$enableval, INSTALL_UNIT_TESTS=no)
 AC_SUBST(INSTALL_UNIT_TESTS)
@@ -179,11 +212,6 @@ AC_ARG_WITH(check-lib-dir, AS_HELP_STRING([--with-check-lib-dir=PATH],[path to i
   checklibdir=$withval, checklibdir=$libdir)
 AC_SUBST(checklibdir)

-# Building without API docs is the default as Nix' C++ interfaces are internal and unstable.
-AC_ARG_ENABLE(internal_api_docs, AS_HELP_STRING([--enable-internal-api-docs],[Build API docs for Nix's internal unstable C++ interfaces]),
-  internal_api_docs=$enableval, internal_api_docs=no)
-AC_SUBST(internal_api_docs)

 # LTO is currently broken with clang for unknown reasons; ld segfaults in the llvm plugin
 AC_ARG_ENABLE(lto, AS_HELP_STRING([--enable-lto],[Enable LTO (only supported with GCC) [default=no]]),
   lto=$enableval, lto=no)
@@ -227,17 +255,25 @@ PKG_CHECK_MODULES([SQLITE3], [sqlite3 >= 3.6.19], [CXXFLAGS="$SQLITE3_CFLAGS $CX
 # Look for libcurl, a required dependency.
 PKG_CHECK_MODULES([LIBCURL], [libcurl], [CXXFLAGS="$LIBCURL_CFLAGS $CXXFLAGS"])

-# Look for editline, a required dependency.
+# Look for editline or readline, a required dependency.
 # The the libeditline.pc file was added only in libeditline >= 1.15.2,
 # see https://github.com/troglobit/editline/commit/0a8f2ef4203c3a4a4726b9dd1336869cd0da8607,
-# but e.g. Ubuntu 16.04 has an older version, so we fall back to searching for
-# editline.h when the pkg-config approach fails.
-PKG_CHECK_MODULES([EDITLINE], [libeditline], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLAGS"], [
-  AC_CHECK_HEADERS([editline.h], [true],
-    [AC_MSG_ERROR([Nix requires libeditline; it was found neither via pkg-config nor its normal header.])])
-  AC_SEARCH_LIBS([readline read_history], [editline], [],
-    [AC_MSG_ERROR([Nix requires libeditline; it was not found via pkg-config, but via its header, but required functions do not work. Maybe it is too old? >= 1.14 is required.])])
-])
+# Older versions are no longer supported.
+AC_ARG_WITH(
+  [readline-flavor],
+  AS_HELP_STRING([--with-readline-flavor],[Which library to use for nice line editting with the Nix language REPL" [default=editline]]),
+  [readline_flavor=$withval],
+  [readline_flavor=editline])
+AS_CASE(["$readline_flavor"],
+  [editline], [
+    readline_flavor_pc=libeditline
+  ],
+  [readline], [
+    readline_flavor_pc=readline
+    AC_DEFINE([USE_READLINE], [1], [Use readline instead of editline])
+  ],
+  [AC_MSG_ERROR([bad value "$readline_flavor" for --with-readline-flavor, must be one of: editline, readline])])
+PKG_CHECK_MODULES([EDITLINE], [$readline_flavor_pc], [CXXFLAGS="$EDITLINE_CFLAGS $CXXFLAGS"])

 # Look for libsodium.
 PKG_CHECK_MODULES([SODIUM], [libsodium], [CXXFLAGS="$SODIUM_CFLAGS $CXXFLAGS"])
@@ -283,7 +319,13 @@ esac
 AC_SUBST(HAVE_SECCOMP, [$have_seccomp])

 # Optional dependencies for better normalizing file system data
-AC_CHECK_HEADERS[sys/xattr.h]
+AC_CHECK_HEADERS([sys/xattr.h])
+AS_IF([test "$ac_cv_header_sys_xattr_h" = "yes"],[
+  AC_CHECK_FUNCS([llistxattr lremovexattr])
+  AS_IF([test "$ac_cv_func_llistxattr" = "yes" && test "$ac_cv_func_lremovexattr" = "yes"],[
+    AC_DEFINE([HAVE_ACL_SUPPORT], [1], [Define if we can manipulate file system Access Control Lists])
+  ])
+])

 # Look for aws-cpp-sdk-s3.
 AC_LANG_PUSH(C++)
@@ -310,48 +352,35 @@ if test "$gc" = yes; then
 AC_DEFINE(HAVE_BOEHMGC, 1, [Whether to use the Boehm garbage collector.])
 fi

-if test "$ENABLE_TESTS" = yes; then
+AS_IF([test "$ENABLE_UNIT_TESTS" == "yes"],[

 # Look for gtest.
-PKG_CHECK_MODULES([GTEST], [gtest_main])
+PKG_CHECK_MODULES([GTEST], [gtest_main gmock_main])

 # Look for rapidcheck.
-AC_ARG_VAR([RAPIDCHECK_HEADERS], [include path of gtest headers shipped by RAPIDCHECK])
-# No pkg-config yet, https://github.com/emil-e/rapidcheck/issues/302
-AC_LANG_PUSH(C++)
-AC_SUBST(RAPIDCHECK_HEADERS)
-[CXXFLAGS="-I $RAPIDCHECK_HEADERS $CXXFLAGS"]
-[LIBS="-lrapidcheck -lgtest $LIBS"]
-AC_CHECK_HEADERS([rapidcheck/gtest.h], [], [], [#include <gtest/gtest.h>])
-dnl AC_CHECK_LIB doesn't work for C++ libs with mangled symbols
-AC_LINK_IFELSE([
-  AC_LANG_PROGRAM([[
-    #include <gtest/gtest.h>
-    #include <rapidcheck/gtest.h>
-  ]], [[
-    return RUN_ALL_TESTS();
-  ]])
-],
-  [],
-  [AC_MSG_ERROR([librapidcheck is not found.])])
-AC_LANG_POP(C++)
+PKG_CHECK_MODULES([RAPIDCHECK], [rapidcheck rapidcheck_gtest])

-fi
+])

 # Look for nlohmann/json.
 PKG_CHECK_MODULES([NLOHMANN_JSON], [nlohmann_json >= 3.9])

-# documentation generation switch
-AC_ARG_ENABLE(doc-gen, AS_HELP_STRING([--disable-doc-gen],[disable documentation generation]),
-  doc_generate=$enableval, doc_generate=yes)
-AC_SUBST(doc_generate)

 # Look for lowdown library.
-PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"])
+AC_ARG_ENABLE([markdown], AS_HELP_STRING([--enable-markdown], [Enable Markdown rendering in the Nix binary (requires lowdown) [default=auto]]),
+  enable_markdown=$enableval, enable_markdown=auto)
+AS_CASE(["$enable_markdown"],
+  [yes | auto], [
+    PKG_CHECK_MODULES([LOWDOWN], [lowdown >= 0.9.0], [
+      CXXFLAGS="$LOWDOWN_CFLAGS $CXXFLAGS"
+      have_lowdown=1
+      AC_DEFINE(HAVE_LOWDOWN, 1, [Whether lowdown is available and should be used for Markdown rendering.])
+    ], [
+      AS_IF([test "x$enable_markdown" == "xyes"], [AC_MSG_ERROR([--enable-markdown was specified, but lowdown was not found.])])
+    ])
+  ],
+  [no], [have_lowdown=],
+  [AC_MSG_ERROR([bad value "$enable_markdown" for --enable-markdown, must be one of: yes, no, auto])])

 # Look for libgit2.
@@ -388,6 +417,8 @@ if test "$embedded_sandbox_shell" = yes; then
 AC_DEFINE(HAVE_EMBEDDED_SANDBOX_SHELL, 1, [Include the sandbox shell in the Nix binary.])
 fi

+])
+
 # Expand all variables in config.status.
 test "$prefix" = NONE && prefix=$ac_default_prefix
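The net effect on the configure surface: tests, documentation generation, and Markdown rendering become individual switches, and the REPL line-editing library is selectable. A hedged sketch of a configure invocation exercising the flags added in this hunk (the combination is illustrative, not a recommended default):

```console
$ ./configure \
    --with-readline-flavor=readline \
    --enable-markdown \
    --disable-unit-tests \
    --enable-internal-api-docs
```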
@@ -81,7 +81,7 @@ EXPAND_ONLY_PREDEF = YES
 # RECURSIVE has no effect here.
 # This tag requires that the tag SEARCH_INCLUDES is set to YES.

-INCLUDE_PATH = @RAPIDCHECK_HEADERS@
+INCLUDE_PATH =

 # If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
 # tag can be used to specify a list of macro names that should be expanded. The
@@ -1,19 +1,7 @@
-.PHONY: internal-api-html
-
-ifeq ($(internal_api_docs), yes)
-
 $(docdir)/internal-api/html/index.html $(docdir)/internal-api/latex: $(d)/doxygen.cfg
 	mkdir -p $(docdir)/internal-api
 	{ cat $< ; echo "OUTPUT_DIRECTORY=$(docdir)/internal-api" ; } | doxygen -

 # Generate the HTML API docs for Nix's unstable internal interfaces.
+.PHONY: internal-api-html
 internal-api-html: $(docdir)/internal-api/html/index.html
-
-else
-
-# Make a nicer error message
-internal-api-html:
-	@echo "Internal API docs are disabled. Configure with '--enable-internal-api-docs', or avoid calling 'make internal-api-html'."
-	@exit 1
-
-endif
@@ -6,6 +6,8 @@ additional-css = ["custom.css"]
 additional-js = ["redirects.js"]
 edit-url-template = "https://github.com/NixOS/nix/tree/master/doc/manual/{path}"
 git-repository-url = "https://github.com/NixOS/nix"
+fold.enable = true
+fold.level = 1

 [preprocessor.anchors]
 renderers = ["html"]
@@ -8,7 +8,15 @@ let
 showBuiltin = name: { doc, args, arity, experimental-feature }:
 let
 experimentalNotice = optionalString (experimental-feature != null) ''
-This function is only available if the [${experimental-feature}](@docroot@/contributing/experimental-features.md#xp-feature-${experimental-feature}) experimental feature is enabled.
+> **Note**
+>
+> This function is only available if the [`${experimental-feature}` experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-${experimental-feature}) is enabled.
+>
+> For example, include the following in [`nix.conf`](@docroot@/command-ref/conf-file.md):
+>
+> ```
+> extra-experimental-features = ${experimental-feature}
+> ```
 '';
 in
 squash ''
@@ -17,10 +25,9 @@ let
 </dt>
 <dd>

-${doc}

 ${experimentalNotice}
+
+${doc}
 </dd>
 '';
 listArgs = args: concatStringsSep " " (map (s: "<var>${s}</var>") args);
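The generated notice steers users toward `extra-experimental-features` in `nix.conf`; the same thing can be passed on the command line for a one-off invocation. A small sketch (the feature names and expression are just examples), using the flag spelling that also appears in the CI workflow above:

```console
$ nix --extra-experimental-features 'nix-command flakes' eval --expr '1 + 1'
2
```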
@@ -93,9 +93,6 @@ let
 maybeProse =
 # FIXME: this is a horrible hack to keep `nix help-stores` working.
-# the correct answer to this is to remove that command and replace it
-# by statically generated manpages or the output of something like `nix
-# store info <store type>`.
 let
 help-stores = ''
 ${index}
@@ -121,7 +118,7 @@ let
 };
 in
 optionalString (details ? doc) (
-if match "@store-types@" details.doc != [ ]
+if match ".*@store-types@.*" details.doc != null
 then help-stores
 else details.doc
 );
@@ -20,10 +20,10 @@ let
 else "`${setting}`";
 # separate body to cleanly handle indentation
 body = ''
-${description}

 ${experimentalFeatureNote}
+
+${description}

 **Default:** ${showDefault documentDefault defaultValue}

 ${showAliases aliases}
@@ -19,10 +19,10 @@ let
 result = squash ''
 # ${name}

-${doc}

 ${experimentalFeatureNote}
+
+${doc}

 ## Settings

 ${showSettings { prefix = "store-${slug}"; inherit inlineHTML; } settings}
@@ -1,5 +1,3 @@
-ifeq ($(doc_generate),yes)
-
 # The version of Nix used to generate the doc. Can also be
 # `$(nix_INSTALL_PATH)` or just `nix` (to grap ambient from the `PATH`),
 # if one prefers.
@@ -180,6 +178,8 @@ manual-html: $(docdir)/manual/index.html
 install: $(docdir)/manual/index.html

 # Generate 'nix' manpages.
+.PHONY: manpages
+manpages: $(mandir)/man1/nix3-manpages
 install: $(mandir)/man1/nix3-manpages
 man: doc/manual/generated/man1/nix3-manpages
 all: doc/manual/generated/man1/nix3-manpages
@@ -225,5 +225,3 @@ $(docdir)/manual/index.html: $(MANUAL_SRCS) $(d)/book.toml $(d)/anchors.jq $(d)/
 	@rm -rf $(DESTDIR)$(docdir)/manual
 	@mv $(DESTDIR)$(docdir)/manual.tmp/html $(DESTDIR)$(docdir)/manual
 	@rm -rf $(DESTDIR)$(docdir)/manual.tmp
-
-endif
@@ -14,14 +14,15 @@

 const redirects = {
 "index.html": {
-"part-advanced-topics": "advanced-topics/advanced-topics.html",
+"part-advanced-topics": "advanced-topics/index.html",
 "chap-tuning-cores-and-jobs": "advanced-topics/cores-vs-jobs.html",
 "chap-diff-hook": "advanced-topics/diff-hook.html",
 "check-dirs-are-unregistered": "advanced-topics/diff-hook.html#check-dirs-are-unregistered",
-"chap-distributed-builds": "advanced-topics/distributed-builds.html",
+"chap-distributed-builds": "command-ref/conf-file.html#conf-builders",
 "chap-post-build-hook": "advanced-topics/post-build-hook.html",
 "chap-post-build-hook-caveats": "advanced-topics/post-build-hook.html#implementation-caveats",
-"part-command-ref": "command-ref/command-ref.html",
+"chap-writing-nix-expressions": "language/index.html",
+"part-command-ref": "command-ref/index.html",
 "conf-allow-import-from-derivation": "command-ref/conf-file.html#conf-allow-import-from-derivation",
 "conf-allow-new-privileges": "command-ref/conf-file.html#conf-allow-new-privileges",
 "conf-allowed-uris": "command-ref/conf-file.html#conf-allowed-uris",
@@ -260,7 +261,7 @@ const redirects = {
 "sec-installer-proxy-settings": "installation/env-variables.html#proxy-environment-variables",
 "sec-nix-ssl-cert-file": "installation/env-variables.html#nix_ssl_cert_file",
 "sec-nix-ssl-cert-file-with-nix-daemon-and-macos": "installation/env-variables.html#nix_ssl_cert_file-with-macos-and-the-nix-daemon",
-"chap-installation": "installation/installation.html",
+"chap-installation": "installation/index.html",
 "ch-installing-binary": "installation/installing-binary.html",
 "sect-macos-installation": "installation/installing-binary.html#macos-installation",
 "sect-macos-installation-change-store-prefix": "installation/installing-binary.html#macos-installation",
@@ -287,7 +288,7 @@ const redirects = {
 "ssec-copy-closure": "package-management/copy-closure.html",
 "sec-garbage-collection": "package-management/garbage-collection.html",
 "ssec-gc-roots": "package-management/garbage-collector-roots.html",
-"chap-package-management": "package-management/package-management.html",
+"chap-package-management": "package-management/index.html",
 "sec-profiles": "package-management/profiles.html",
 "ssec-s3-substituter": "package-management/s3-substituter.html",
 "ssec-s3-substituter-anonymous-reads": "package-management/s3-substituter.html#anonymous-reads-to-your-s3-compatible-binary-cache",
@@ -296,7 +297,7 @@ const redirects = {
 "sec-sharing-packages": "package-management/sharing-packages.html",
 "ssec-ssh-substituter": "package-management/ssh-substituter.html",
 "chap-quick-start": "quick-start.html",
-"sec-relnotes": "release-notes/release-notes.html",
+"sec-relnotes": "release-notes/index.html",
 "ch-relnotes-0.10.1": "release-notes/rl-0.10.1.html",
 "ch-relnotes-0.10": "release-notes/rl-0.10.html",
 "ssec-relnotes-0.11": "release-notes/rl-0.11.html",
@@ -357,7 +358,11 @@ const redirects = {
 "one-time-setup": "testing.html#one-time-setup",
 "using-the-ci-generated-installer-for-manual-testing": "testing.html#using-the-ci-generated-installer-for-manual-testing",
 "characterization-testing": "#characterisation-testing-unit",
-}
+},
+"glossary.html": {
+"gloss-local-store": "store/types/local-store.html",
+"gloss-chroot-store": "store/types/local-store.html",
+},
 };

 // the following code matches the current page's URL against the set of redirects.
Deleted file:
@@ -1,9 +0,0 @@
-synopsis: Mounted SSH Store
-issues: #7890
-prs: #7912
-description: {
-
-Introduced the store [`mounted-ssh-ng://`](@docroot@/command-ref/new-cli/nix3-help-stores.md).
-This store allows full access to a Nix store on a remote machine and additionally requires that the store be mounted in the local filesystem.
-
-}

Deleted file:
@@ -1,8 +0,0 @@
-synopsis: `nix config show`
-issues: #7672
-prs: #9477
-description: {
-
-`nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command-line interface.
-
-}

Deleted file:
@@ -1,9 +0,0 @@
-synopsis: Fix `nix-env --query --drv-path --json`
-prs: #9257
-description: {
-
-Fixed a bug where `nix-env --query` ignored `--drv-path` when `--json` was set.
-
-}
|
|
13
doc/manual/rl-next/nix-eval-derivations.md
Normal file
13
doc/manual/rl-next/nix-eval-derivations.md
Normal file
|
@ -0,0 +1,13 @@
|
||||||
|
---
|
||||||
|
synopsis: "`nix eval` prints derivations as `.drv` paths"
|
||||||
|
prs: 10200
|
||||||
|
---
|
||||||
|
|
||||||
|
`nix eval` will now print derivations as their `.drv` paths, rather than as
|
||||||
|
attribute sets. This makes commands like `nix eval nixpkgs#bash` terminate
|
||||||
|
instead of infinitely looping into recursive self-referential attributes:
|
||||||
|
|
||||||
|
```ShellSession
|
||||||
|
$ nix eval nixpkgs#bash
|
||||||
|
«derivation /nix/store/m32cbgbd598f4w299g0hwyv7gbw6rqcg-bash-5.2p26.drv»
|
||||||
|
```
|
|
@@ -104,8 +104,12 @@
 - [Channels](command-ref/files/channels.md)
 - [Default Nix expression](command-ref/files/default-nix-expression.md)
 - [Architecture and Design](architecture/architecture.md)
-- [Protocols](protocols/index.md)
+- [Formats and Protocols](protocols/index.md)
+- [JSON Formats](protocols/json/index.md)
+- [Store Object Info](protocols/json/store-object-info.md)
+- [Derivation](protocols/json/derivation.md)
 - [Serving Tarball Flakes](protocols/tarball-fetcher.md)
+- [Store Path Specification](protocols/store-path.md)
 - [Derivation "ATerm" file format](protocols/derivation-aterm.md)
 - [Glossary](glossary.md)
 - [Contributing](contributing/index.md)
@@ -117,6 +121,8 @@
 - [C++ style guide](contributing/cxx.md)
 - [Release Notes](release-notes/index.md)
 {{#include ./SUMMARY-rl-next.md}}
+- [Release 2.21 (2024-03-11)](release-notes/rl-2.21.md)
+- [Release 2.20 (2024-01-29)](release-notes/rl-2.20.md)
 - [Release 2.19 (2023-11-17)](release-notes/rl-2.19.md)
 - [Release 2.18 (2023-09-20)](release-notes/rl-2.18.md)
 - [Release 2.17 (2023-07-24)](release-notes/rl-2.17.md)
@@ -31,10 +31,11 @@
 /installation/installation /installation 301!

 /package-management/basic-package-mgmt /command-ref/nix-env 301!
-/package-management/channels* /command-ref/nix-channel 301!
+/package-management/channels /command-ref/nix-channel 301!
 /package-management/package-management /package-management 301!
-/package-management/s3-substituter* /command-ref/new-cli/nix3-help-stores#s3-binary-cache-store 301!
+/package-management/s3-substituter /store/types/s3-binary-cache-store 301!

 /protocols/protocols /protocols 301!
+/json/* /protocols/json/:splat 301!

 /release-notes/release-notes /release-notes 301!
@ -36,16 +36,8 @@ error: cannot connect to 'mac'
|
||||||
then you need to ensure that the `PATH` of non-interactive login shells
|
then you need to ensure that the `PATH` of non-interactive login shells
|
||||||
contains Nix.
|
contains Nix.
|
||||||
|
|
||||||
> **Warning**
|
The [list of remote build machines](@docroot@/command-ref/conf-file.md#conf-builders) can be specified on the command line or in the Nix configuration file.
|
||||||
>
|
For example, the following command allows you to build a derivation for `x86_64-darwin` on a Linux machine:
|
||||||
> If you are building via the Nix daemon, it is the Nix daemon user account (that is, `root`) that should have SSH access to a user (not necessarily `root`) on the remote machine.
|
|
||||||
>
|
|
||||||
> If you can’t or don’t want to configure `root` to be able to access the remote machine, you can use a private Nix store instead by passing e.g. `--store ~/my-nix` when running a Nix command from the local machine.
|
|
||||||
|
|
||||||
The list of remote machines can be specified on the command line or in
|
|
||||||
the Nix configuration file. The former is convenient for testing. For
|
|
||||||
example, the following command allows you to build a derivation for
|
|
||||||
`x86_64-darwin` on a Linux machine:
|
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ uname
|
$ uname
|
||||||
|
@ -60,97 +52,20 @@ $ cat ./result
|
||||||
Darwin
|
Darwin
|
||||||
```
|
```
|
||||||
|
|
||||||
It is possible to specify multiple builders separated by a semicolon or
|
It is possible to specify multiple build machines separated by a semicolon or a newline, e.g.
|
||||||
a newline, e.g.
|
|
||||||
|
|
||||||
```console
|
```console
|
||||||
--builders 'ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd'
|
--builders 'ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd'
|
||||||
```
|
```
|
||||||
|
|
||||||
Each machine specification consists of the following elements, separated
|
Remote build machines can also be configured in [`nix.conf`](@docroot@/command-ref/conf-file.md), e.g.
|
||||||
by spaces. Only the first element is required. To leave a field at its
|
|
||||||
default, set it to `-`.
|
|
||||||
|
|
||||||
1. The URI of the remote store in the format
|
|
||||||
`ssh://[username@]hostname`, e.g. `ssh://nix@mac` or `ssh://mac`.
|
|
||||||
For backward compatibility, `ssh://` may be omitted. The hostname
|
|
||||||
may be an alias defined in your `~/.ssh/config`.
|
|
||||||
|
|
||||||
2. A comma-separated list of Nix platform type identifiers, such as
|
|
||||||
`x86_64-darwin`. It is possible for a machine to support multiple
|
|
||||||
platform types, e.g., `i686-linux,x86_64-linux`. If omitted, this
|
|
||||||
defaults to the local platform type.
|
|
||||||
|
|
||||||
3. The SSH identity file to be used to log in to the remote machine. If
|
|
||||||
omitted, SSH will use its regular identities.
|
|
||||||
|
|
||||||
4. The maximum number of builds that Nix will execute in parallel on
|
|
||||||
the machine. Typically this should be equal to the number of CPU
|
|
||||||
cores. For instance, the machine `itchy` in the example will execute
|
|
||||||
up to 8 builds in parallel.
|
|
||||||
|
|
||||||
5. The “speed factor”, indicating the relative speed of the machine. If
|
|
||||||
there are multiple machines of the right type, Nix will prefer the
|
|
||||||
fastest, taking load into account.
|
|
||||||
|
|
||||||
6. A comma-separated list of *supported features*. If a derivation has
|
|
||||||
the `requiredSystemFeatures` attribute, then Nix will only perform
|
|
||||||
the derivation on a machine that has the specified features. For
|
|
||||||
instance, the attribute
|
|
||||||
|
|
||||||
```nix
|
|
||||||
requiredSystemFeatures = [ "kvm" ];
|
|
||||||
```
|
|
||||||
|
|
||||||
will cause the build to be performed on a machine that has the `kvm`
|
|
||||||
feature.
|
|
||||||
|
|
||||||
7. A comma-separated list of *mandatory features*. A machine will only
|
|
||||||
be used to build a derivation if all of the machine’s mandatory
|
|
||||||
features appear in the derivation’s `requiredSystemFeatures`
|
|
||||||
attribute.
|
|
||||||
|
|
||||||
8. The (base64-encoded) public host key of the remote machine. If omitted, SSH
|
|
||||||
will use its regular known-hosts file. Specifically, the field is calculated
|
|
||||||
via `base64 -w0 /etc/ssh/ssh_host_ed25519_key.pub`.
|
|
||||||
|
|
||||||
For example, the machine specification
|
|
||||||
|
|
||||||
nix@scratchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 8 1 kvm
|
|
||||||
nix@itchy.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 8 2
|
|
||||||
nix@poochie.labs.cs.uu.nl i686-linux /home/nix/.ssh/id_scratchy_auto 1 2 kvm benchmark
|
|
||||||
|
|
||||||
specifies several machines that can perform `i686-linux` builds.
|
|
||||||
However, `poochie` will only do builds that have the attribute
|
|
||||||
|
|
||||||
```nix
|
|
||||||
requiredSystemFeatures = [ "benchmark" ];
|
|
||||||
```
|
|
||||||
|
|
||||||
or
|
|
||||||
|
|
||||||
```nix
|
|
||||||
requiredSystemFeatures = [ "benchmark" "kvm" ];
|
|
||||||
```
|
|
||||||
|
|
||||||
`itchy` cannot do builds that require `kvm`, but `scratchy` does support
|
|
||||||
such builds. For regular builds, `itchy` will be preferred over
|
|
||||||
`scratchy` because it has a higher speed factor.
|
|
||||||
|
|
||||||
Remote builders can also be configured in `nix.conf`, e.g.
|
|
||||||
|
|
||||||
builders = ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd
|
builders = ssh://mac x86_64-darwin ; ssh://beastie x86_64-freebsd
|
||||||
|
|
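For a fuller entry, the fields described above can be combined in the same setting; the user, host name, identity file, and job count below are illustrative, and `-` keeps a field at its default:

    builders = ssh://nix@left x86_64-linux /home/alice/.ssh/id_left 8 2 kvm,benchmark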
||||||
Finally, remote builders can be configured in a separate configuration
|
Finally, remote build machines can be configured in a separate configuration
|
||||||
file included in `builders` via the syntax `@file`. For example,
|
file included in `builders` via the syntax `@/path/to/file`. For example,
|
||||||
|
|
||||||
builders = @/etc/nix/machines
|
builders = @/etc/nix/machines
|
||||||
|
|
||||||
causes the list of machines in `/etc/nix/machines` to be included. (This
|
causes the list of machines in `/etc/nix/machines` to be included.
|
||||||
is the default.)
|
(This is the default.)
|
||||||
|
|
||||||
If you want the builders to use caches, you likely want to set the
|
|
||||||
option `builders-use-substitutes` in your local `nix.conf`.
|
|
||||||
|
|
||||||
To build only on remote builders and disable building on the local
|
|
||||||
machine, you can use the option `--max-jobs 0`.
|
|
||||||
|
|
|
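For example (a sketch; the file name is illustrative), with `builders-use-substitutes = true` set in the local `nix.conf`, the following builds a derivation exclusively on the configured remote machines:

```console
$ nix-build default.nix --max-jobs 0
```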
@ -51,7 +51,7 @@ These options are for deleting old [profiles] prior to deleting unreachable [sto
|
||||||
- <span id="opt-delete-old">[`--delete-old`](#opt-delete-old)</span> / `-d`\
|
- <span id="opt-delete-old">[`--delete-old`](#opt-delete-old)</span> / `-d`\
|
||||||
Delete all old generations of profiles.
|
Delete all old generations of profiles.
|
||||||
|
|
||||||
This is the equivalent of invoking `nix-env --delete-generations old` on each found profile.
|
This is the equivalent of invoking [`nix-env --delete-generations old`](@docroot@/command-ref/nix-env/delete-generations.md#generations-old) on each found profile.
|
||||||
|
|
||||||
- <span id="opt-delete-older-than">[`--delete-older-than`](#opt-delete-older-than)</span> *period*\
|
- <span id="opt-delete-older-than">[`--delete-older-than`](#opt-delete-older-than)</span> *period*\
|
||||||
Delete all generations of profiles older than the specified amount (except for the generations that were active at that point in time).
|
Delete all generations of profiles older than the specified amount (except for the generations that were active at that point in time).
|
||||||
|
|
|
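For example, to delete unreachable store objects together with all profile generations older than 30 days (an illustrative period):

```console
$ nix-collect-garbage --delete-older-than 30d
```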
@ -12,13 +12,13 @@ This operation deletes the specified generations of the current profile.
|
||||||
|
|
||||||
*generations* can be one of the following:
|
*generations* can be one of the following:
|
||||||
|
|
||||||
- <span id="generations-list">`<number>...`</span>:\
|
- <span id="generations-list">[`<number>...`](#generations-list)</span>:\
|
||||||
A list of generation numbers, each one a separate command-line argument.
|
A list of generation numbers, each one a separate command-line argument.
|
||||||
|
|
||||||
Delete exactly the profile generations given by their generation number.
|
Delete exactly the profile generations given by their generation number.
|
||||||
Deleting the current generation is not allowed.
|
Deleting the current generation is not allowed.
|
||||||
|
|
||||||
- The special value <span id="generations-old">`old`</span>
|
- <span id="generations-old">[The special value `old`](#generations-old)</span>
|
||||||
|
|
||||||
Delete all generations except the current one.
|
Delete all generations except the current one.
|
||||||
|
|
||||||
|
@ -30,7 +30,7 @@ This operation deletes the specified generations of the current profile.
|
||||||
> Because one can roll back to a previous generation, it is possible to have generations newer than the current one.
|
> Because one can roll back to a previous generation, it is possible to have generations newer than the current one.
|
||||||
> They will also be deleted.
|
> They will also be deleted.
|
||||||
|
|
||||||
- <span id="generations-time">`<number>d`</span>:\
|
- <span id="generations-time">[`<number>d`](#generations-time)</span>:\
|
||||||
The last *number* days
|
The last *number* days
|
||||||
|
|
||||||
*Example*: `30d`
|
*Example*: `30d`
|
||||||
|
@ -38,7 +38,7 @@ This operation deletes the specified generations of the current profile.
|
||||||
Delete all generations created more than *number* days ago, except the most recent one of them.
|
Delete all generations created more than *number* days ago, except the most recent one of them.
|
||||||
This allows rolling back to generations that were available within the specified period.
|
This allows rolling back to generations that were available within the specified period.
|
||||||
|
|
||||||
- <span id="generations-count">`+<number>`</span>:\
|
- <span id="generations-count">[`+<number>`](#generations-count)</span>:\
|
||||||
The last *number* generations up to the present
|
The last *number* generations up to the present
|
||||||
|
|
||||||
*Example*: `+5`
|
*Example*: `+5`
|
||||||
|
|
|
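For example (illustrative values), each of the forms described above can be passed to `nix-env --delete-generations`:

```console
$ nix-env --delete-generations 3 4 8
$ nix-env --delete-generations old
$ nix-env --delete-generations 30d
$ nix-env --delete-generations +5
```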
@ -35,13 +35,51 @@ standard input.
|
||||||
|
|
||||||
- `--parse`\
|
- `--parse`\
|
||||||
Just parse the input files, and print their abstract syntax trees on
|
Just parse the input files, and print their abstract syntax trees on
|
||||||
standard output in ATerm format.
|
standard output as a Nix expression.
|
||||||
|
|
||||||
- `--eval`\
|
- `--eval`\
|
||||||
Just parse and evaluate the input files, and print the resulting
|
Just parse and evaluate the input files, and print the resulting
|
||||||
values on standard output. No instantiation of store derivations
|
values on standard output. No instantiation of store derivations
|
||||||
takes place.
|
takes place.
|
||||||
|
|
||||||
|
> **Warning**
|
||||||
|
>
|
||||||
|
> This option produces output which can be parsed as a Nix expression which
|
||||||
|
> will produce a different result than the input expression when evaluated.
|
||||||
|
> For example, these two Nix expressions print the same result despite
|
||||||
|
> having different meaning:
|
||||||
|
>
|
||||||
|
> ```console
|
||||||
|
> $ nix-instantiate --eval --expr '{ a = {}; }'
|
||||||
|
> { a = <CODE>; }
|
||||||
|
> $ nix-instantiate --eval --expr '{ a = <CODE>; }'
|
||||||
|
> { a = <CODE>; }
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> For human-readable output, `nix eval` (experimental) is more informative:
|
||||||
|
>
|
||||||
|
> ```console
|
||||||
|
> $ nix-instantiate --eval --expr 'a: a'
|
||||||
|
> <LAMBDA>
|
||||||
|
> $ nix eval --expr 'a: a'
|
||||||
|
> «lambda @ «string»:1:1»
|
||||||
|
> ```
|
||||||
|
>
|
||||||
|
> For machine-readable output, the `--xml` option produces unambiguous
|
||||||
|
> output:
|
||||||
|
>
|
||||||
|
> ```console
|
||||||
|
> $ nix-instantiate --eval --xml --expr '{ foo = <CODE>; }'
|
||||||
|
> <?xml version='1.0' encoding='utf-8'?>
|
||||||
|
> <expr>
|
||||||
|
> <attrs>
|
||||||
|
> <attr column="3" line="1" name="foo">
|
||||||
|
> <unevaluated />
|
||||||
|
> </attr>
|
||||||
|
> </attrs>
|
||||||
|
> </expr>
|
||||||
|
> ```
|
||||||
|
|
||||||
- `--find-file`\
|
- `--find-file`\
|
||||||
Look up the given files in Nix’s search path (as specified by the
|
Look up the given files in Nix’s search path (as specified by the
|
||||||
`NIX_PATH` environment variable). If found, print the corresponding
|
`NIX_PATH` environment variable). If found, print the corresponding
|
||||||
|
@ -61,11 +99,11 @@ standard input.
|
||||||
|
|
||||||
- `--json`\
|
- `--json`\
|
||||||
When used with `--eval`, print the resulting value as a JSON
|
When used with `--eval`, print the resulting value as a JSON
|
||||||
representation of the abstract syntax tree rather than as an ATerm.
|
representation of the abstract syntax tree rather than as a Nix expression.
|
||||||
|
|
||||||
- `--xml`\
|
- `--xml`\
|
||||||
When used with `--eval`, print the resulting value as an XML
|
When used with `--eval`, print the resulting value as an XML
|
||||||
representation of the abstract syntax tree rather than as an ATerm.
|
representation of the abstract syntax tree rather than as a Nix expression.
|
||||||
The schema is the same as that used by the [`toXML`
|
The schema is the same as that used by the [`toXML`
|
||||||
built-in](../language/builtins.md).
|
built-in](../language/builtins.md).
|
||||||
|
|
||||||
|
@ -133,28 +171,24 @@ $ nix-instantiate --eval --xml --expr '1 + 2'
|
||||||
The difference between non-strict and strict evaluation:
|
The difference between non-strict and strict evaluation:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-instantiate --eval --xml --expr 'rec { x = "foo"; y = x; }'
|
$ nix-instantiate --eval --xml --expr '{ x = {}; }'
|
||||||
...
|
<?xml version='1.0' encoding='utf-8'?>
|
||||||
<attr name="x">
|
<expr>
|
||||||
<string value="foo" />
|
<attrs>
|
||||||
</attr>
|
<attr column="3" line="1" name="x">
|
||||||
<attr name="y">
|
|
||||||
<unevaluated />
|
<unevaluated />
|
||||||
</attr>
|
</attr>
|
||||||
...
|
</attrs>
|
||||||
```
|
</expr>
|
||||||
|
|
||||||
Note that `y` is left unevaluated (the XML representation doesn’t
|
$ nix-instantiate --eval --xml --strict --expr '{ x = {}; }'
|
||||||
attempt to show non-normal forms).
|
<?xml version='1.0' encoding='utf-8'?>
|
||||||
|
<expr>
|
||||||
```console
|
<attrs>
|
||||||
$ nix-instantiate --eval --xml --strict --expr 'rec { x = "foo"; y = x; }'
|
<attr column="3" line="1" name="x">
|
||||||
...
|
<attrs>
|
||||||
<attr name="x">
|
</attrs>
|
||||||
<string value="foo" />
|
|
||||||
</attr>
|
</attr>
|
||||||
<attr name="y">
|
</attrs>
|
||||||
<string value="foo" />
|
</expr>
|
||||||
</attr>
|
|
||||||
...
|
|
||||||
```
|
```
|
||||||
|
|
|
@ -30,7 +30,7 @@ To build the manual incrementally, [enter the development shell](./hacking.md) a
|
||||||
make manual-html -j $NIX_BUILD_CORES
|
make manual-html -j $NIX_BUILD_CORES
|
||||||
```
|
```
|
||||||
|
|
||||||
and open `./outputs/out/share/doc/nix/manual/language/index.html`.
|
and open `./outputs/doc/share/doc/nix/manual/language/index.html`.
|
||||||
|
|
||||||
In order to reflect changes to the [Makefile for the manual], clear all generated files before re-building:
|
In order to reflect changes to the [Makefile for the manual], clear all generated files before re-building:
|
||||||
|
|
||||||
|
@ -172,7 +172,7 @@ Please observe these guidelines to ease reviews:
|
||||||
> ```
|
> ```
|
||||||
````
|
````
|
||||||
|
|
||||||
Highlight syntax definiions as such, using [EBNF](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form) notation:
|
Highlight syntax definitions as such, using [EBNF](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form) notation:
|
||||||
|
|
||||||
````
|
````
|
||||||
> **Syntax**
|
> **Syntax**
|
||||||
|
|
|
@ -31,7 +31,7 @@ This shell also adds `./outputs/bin/nix` to your `$PATH` so you can run `nix` im
|
||||||
To get a shell with one of the other [supported compilation environments](#compilation-environments):
|
To get a shell with one of the other [supported compilation environments](#compilation-environments):
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix develop .#native-clang11StdenvPackages
|
$ nix develop .#native-clangStdenvPackages
|
||||||
```
|
```
|
||||||
|
|
||||||
> **Note**
|
> **Note**
|
||||||
|
@ -44,18 +44,21 @@ To build Nix itself in this shell:
|
||||||
```console
|
```console
|
||||||
[nix-shell]$ autoreconfPhase
|
[nix-shell]$ autoreconfPhase
|
||||||
[nix-shell]$ configurePhase
|
[nix-shell]$ configurePhase
|
||||||
[nix-shell]$ make -j $NIX_BUILD_CORES
|
[nix-shell]$ make -j $NIX_BUILD_CORES OPTIMIZE=0
|
||||||
```
|
```
|
||||||
|
|
||||||
To install it in `$(pwd)/outputs` and test it:
|
To install it in `$(pwd)/outputs` and test it:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
[nix-shell]$ make install
|
[nix-shell]$ make install OPTIMIZE=0
|
||||||
[nix-shell]$ make installcheck -j $NIX_BUILD_CORES
|
[nix-shell]$ make installcheck check -j $NIX_BUILD_CORES
|
||||||
[nix-shell]$ nix --version
|
[nix-shell]$ nix --version
|
||||||
nix (Nix) 2.12
|
nix (Nix) 2.12
|
||||||
```
|
```
|
||||||
|
|
||||||
|
For more information on running and filtering tests, see
|
||||||
|
[`testing.md`](./testing.md).
|
||||||
|
|
||||||
To build a release version of Nix for the current operating system and CPU architecture:
|
To build a release version of Nix for the current operating system and CPU architecture:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
|
@ -75,7 +78,7 @@ $ nix-shell
|
||||||
To get a shell with one of the other [supported compilation environments](#compilation-environments):
|
To get a shell with one of the other [supported compilation environments](#compilation-environments):
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-shell --attr devShells.x86_64-linux.native-clang11StdenvPackages
|
$ nix-shell --attr devShells.x86_64-linux.native-clangStdenvPackages
|
||||||
```
|
```
|
||||||
|
|
||||||
> **Note**
|
> **Note**
|
||||||
|
@ -108,6 +111,26 @@ $ nix-build
|
||||||
|
|
||||||
You can also build Nix for one of the [supported platforms](#platforms).
|
You can also build Nix for one of the [supported platforms](#platforms).
|
||||||
|
|
||||||
|
## Makefile variables
|
||||||
|
|
||||||
|
You may need `profiledir=$out/etc/profile.d` and `sysconfdir=$out/etc` to run `make install`.
|
||||||
|
|
||||||
|
Run `make` with [`-e` / `--environment-overrides`](https://www.gnu.org/software/make/manual/make.html#index-_002de) to allow environment variables to override `Makefile` variables:
|
||||||
|
|
||||||
|
- `ENABLE_BUILD=yes` to enable building the C++ code.
|
||||||
|
- `ENABLE_DOC_GEN=yes` to enable building the documentation (manual, man pages, etc.).
|
||||||
|
|
||||||
|
The docs can take a while to build, so you may want to disable this for local development.
|
||||||
|
- `ENABLE_FUNCTIONAL_TESTS=yes` to enable building the functional tests.
|
||||||
|
- `ENABLE_UNIT_TESTS=yes` to enable building the unit tests.
|
||||||
|
- `OPTIMIZE=1` to enable optimizations.
|
||||||
|
- `libraries=libutil programs=` to only build a specific library.
|
||||||
|
|
||||||
|
This will fail in the linking phase if the other libraries haven't been built, but is useful for checking types.
|
||||||
|
- `libraries= programs=nix` to only build a specific program.
|
||||||
|
|
||||||
|
This will not work in general, because the programs need the libraries.
|
||||||
|
|
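For instance, an unoptimized build of only `libutil`, useful for quick type checking, might look like this (a sketch; adjust to your checkout):

```console
$ make -j $NIX_BUILD_CORES OPTIMIZE=0 libraries=libutil programs=
```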
||||||
## Platforms
|
## Platforms
|
||||||
|
|
||||||
Nix can be built for various platforms, as specified in [`flake.nix`]:
|
Nix can be built for various platforms, as specified in [`flake.nix`]:
|
||||||
|
@ -124,10 +147,10 @@ Nix can be built for various platforms, as specified in [`flake.nix`]:
|
||||||
|
|
||||||
In order to build Nix for a different platform than the one you're currently
|
In order to build Nix for a different platform than the one you're currently
|
||||||
on, you need a way for your current Nix installation to build code for that
|
on, you need a way for your current Nix installation to build code for that
|
||||||
platform. Common solutions include [remote builders] and [binary format emulation]
|
platform. Common solutions include [remote build machines] and [binary format emulation]
|
||||||
(only supported on NixOS).
|
(only supported on NixOS).
|
||||||
|
|
||||||
[remote builders]: ../advanced-topics/distributed-builds.md
|
[remote builders]: @docroot@/language/derivations.md#attr-builder
|
||||||
[binary format emulation]: https://nixos.org/manual/nixos/stable/options.html#opt-boot.binfmt.emulatedSystems
|
[binary format emulation]: https://nixos.org/manual/nixos/stable/options.html#opt-boot.binfmt.emulatedSystems
|
||||||
|
|
||||||
Given such a setup, executing the build only requires selecting the respective attribute.
|
Given such a setup, executing the build only requires selecting the respective attribute.
|
||||||
|
@ -235,10 +258,10 @@ See [supported compilation environments](#compilation-environments) and instruct
|
||||||
To use the LSP with your editor, you first need to [set up `clangd`](https://clangd.llvm.org/installation#project-setup) by running:
|
To use the LSP with your editor, you first need to [set up `clangd`](https://clangd.llvm.org/installation#project-setup) by running:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
make clean && bear -- make -j$NIX_BUILD_CORES default check install
|
make compile_commands.json
|
||||||
```
|
```
|
||||||
|
|
||||||
Configure your editor to use the `clangd` from the shell, either by running it inside the development shell, or by using [nix-direnv](https://github.com/nix-community/nix-direnv) and [the appropriate editor plugin](https://github.com/direnv/direnv/wiki#editor-integration).
|
Configure your editor to use the `clangd` from the `.#native-clangStdenvPackages` shell. You can do that either by running it inside the development shell, or by using [nix-direnv](https://github.com/nix-community/nix-direnv) and [the appropriate editor plugin](https://github.com/direnv/direnv/wiki#editor-integration).
|
||||||
|
|
||||||
> **Note**
|
> **Note**
|
||||||
>
|
>
|
||||||
|
@ -257,17 +280,16 @@ User-visible changes should come with a release note.
|
||||||
Here's what a complete entry looks like. The file name is not incorporated in the document.
|
Here's what a complete entry looks like. The file name is not incorporated in the document.
|
||||||
|
|
||||||
```
|
```
|
||||||
|
---
|
||||||
synopsis: Basically a title
|
synopsis: Basically a title
|
||||||
issues: #1234
|
issues: 1234
|
||||||
prs: #1238
|
prs: 1238
|
||||||
description: {
|
---
|
||||||
|
|
||||||
Here's one or more paragraphs that describe the change.
|
Here's one or more paragraphs that describe the change.
|
||||||
|
|
||||||
- It's markdown
|
- It's markdown
|
||||||
- Add references to the manual using @docroot@
|
- Add references to the manual using @docroot@
|
||||||
|
|
||||||
}
|
|
||||||
```
|
```
|
||||||
|
|
||||||
Significant changes should add the following header, which moves them to the top.
|
Significant changes should add the following header, which moves them to the top.
|
||||||
|
@ -282,4 +304,45 @@ See also the [format documentation](https://github.com/haskell/cabal/blob/master
|
||||||
### Build process
|
### Build process
|
||||||
|
|
||||||
Releases have a precomputed `rl-MAJOR.MINOR.md`, and no `rl-next.md`.
|
Releases have a precomputed `rl-MAJOR.MINOR.md`, and no `rl-next.md`.
|
||||||
Set `buildUnreleasedNotes = true;` in `flake.nix` to build the release notes on the fly.
|
|
||||||
|
## Branches
|
||||||
|
|
||||||
|
- [`master`](https://github.com/NixOS/nix/commits/master)
|
||||||
|
|
||||||
|
The main development branch. All changes are approved and merged here.
|
||||||
|
When developing a change, create a branch based on the latest `master`.
|
||||||
|
|
||||||
|
Maintainers try to [keep it in a release-worthy state](#reverting).
|
||||||
|
|
||||||
|
- [`maintenance-*.*`](https://github.com/NixOS/nix/branches/all?query=maintenance)
|
||||||
|
|
||||||
|
These branches are the subject of backports only, and are
|
||||||
|
also [kept](#reverting) in a release-worthy state.
|
||||||
|
|
||||||
|
See [`maintainers/backporting.md`](https://github.com/NixOS/nix/blob/master/maintainers/backporting.md)
|
||||||
|
|
||||||
|
- [`latest-release`](https://github.com/NixOS/nix/tree/latest-release)
|
||||||
|
|
||||||
|
The latest patch release of the latest minor version.
|
||||||
|
|
||||||
|
See [`maintainers/release-process.md`](https://github.com/NixOS/nix/blob/master/maintainers/release-process.md)
|
||||||
|
|
||||||
|
- [`backport-*-to-*`](https://github.com/NixOS/nix/branches/all?query=backport)
|
||||||
|
|
||||||
|
Generally branches created by the backport action.
|
||||||
|
|
||||||
|
See [`maintainers/backporting.md`](https://github.com/NixOS/nix/blob/master/maintainers/backporting.md)
|
||||||
|
|
||||||
|
- [_other_](https://github.com/NixOS/nix/branches/all)
|
||||||
|
|
||||||
|
Branches that do not conform to the above patterns should be feature branches.
|
||||||
|
|
||||||
|
## Reverting
|
||||||
|
|
||||||
|
If a change turns out to be merged by mistake, or contain a regression, it may be reverted.
|
||||||
|
A revert is not a rejection of the contribution, but merely part of an effective development process.
|
||||||
|
It makes sure that development keeps running smoothly, with minimal uncertainty, and less overhead.
|
||||||
|
If maintainers have to worry too much about avoiding reverts, they would not be able to merge as much.
|
||||||
|
By embracing reverts as a good part of the development process, everyone wins.
|
||||||
|
|
||||||
|
However, taking a step back may be frustrating, so maintainers will be extra supportive on the next try.
|
||||||
|
|
|
@ -77,7 +77,7 @@ there is no risk of any build-system wildcards for the library accidentally pick
|
||||||
### Running tests
|
### Running tests
|
||||||
|
|
||||||
You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`.
|
You can run the whole testsuite with `make check`, or the tests for a specific component with `make libfoo-tests_RUN`.
|
||||||
Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option, or the `GTEST_FILTER` environment variable.
|
Finer-grained filtering is also possible using the [--gtest_filter](https://google.github.io/googletest/advanced.html#running-a-subset-of-the-tests) command-line option, or the `GTEST_FILTER` environment variable, e.g. `GTEST_FILTER='ErrorTraceTest.*' make check`.
|
||||||
|
|
||||||
### Characterisation testing { #characaterisation-testing-unit }
|
### Characterisation testing { #characaterisation-testing-unit }
|
||||||
|
|
||||||
|
|
|
@ -3,10 +3,10 @@
|
||||||
- [derivation]{#gloss-derivation}
|
- [derivation]{#gloss-derivation}
|
||||||
|
|
||||||
A description of a build task. The result of a derivation is a
|
A description of a build task. The result of a derivation is a
|
||||||
store object. Derivations are typically specified in Nix expressions
|
store object. Derivations declared in Nix expressions are specified
|
||||||
using the [`derivation` primitive](./language/derivations.md). These are
|
using the [`derivation` primitive](./language/derivations.md). These are
|
||||||
translated into low-level *store derivations* (implicitly by
|
translated into low-level *store derivations* (implicitly by
|
||||||
`nix-env` and `nix-build`, or explicitly by `nix-instantiate`).
|
`nix-build`, or explicitly by `nix-instantiate`).
|
||||||
|
|
||||||
[derivation]: #gloss-derivation
|
[derivation]: #gloss-derivation
|
||||||
|
|
||||||
|
@ -14,6 +14,7 @@
|
||||||
|
|
||||||
A [derivation] represented as a `.drv` file in the [store].
|
A [derivation] represented as a `.drv` file in the [store].
|
||||||
It has a [store path], like any [store object].
|
It has a [store path], like any [store object].
|
||||||
|
It is the [instantiated][instantiate] form of a derivation.
|
||||||
|
|
||||||
Example: `/nix/store/g946hcz4c8mdvq2g8vxx42z51qb71rvp-git-2.38.1.drv`
|
Example: `/nix/store/g946hcz4c8mdvq2g8vxx42z51qb71rvp-git-2.38.1.drv`
|
||||||
|
|
||||||
|
@ -23,9 +24,9 @@
|
||||||
|
|
||||||
- [instantiate]{#gloss-instantiate}, instantiation
|
- [instantiate]{#gloss-instantiate}, instantiation
|
||||||
|
|
||||||
Translate a [derivation] into a [store derivation].
|
Save an evaluated [derivation] as a [store derivation] in the Nix [store].
|
||||||
|
|
||||||
See [`nix-instantiate`](./command-ref/nix-instantiate.md).
|
See [`nix-instantiate`](./command-ref/nix-instantiate.md), which produces a store derivation from a Nix expression that evaluates to a derivation.
|
||||||
|
|
||||||
[instantiate]: #gloss-instantiate
|
[instantiate]: #gloss-instantiate
|
||||||
|
|
||||||
|
@ -36,7 +37,7 @@
|
||||||
This can be achieved by:
|
This can be achieved by:
|
||||||
- Fetching a pre-built [store object] from a [substituter]
|
- Fetching a pre-built [store object] from a [substituter]
|
||||||
- Running the [`builder`](@docroot@/language/derivations.md#attr-builder) executable as specified in the corresponding [derivation]
|
- Running the [`builder`](@docroot@/language/derivations.md#attr-builder) executable as specified in the corresponding [derivation]
|
||||||
- Delegating to a [remote builder](@docroot@/advanced-topics/distributed-builds.html) and retrieving the outputs
|
- Delegating to a [remote machine](@docroot@/command-ref/conf-file.md#conf-builders) and retrieving the outputs
|
||||||
<!-- TODO: link [running] to build process page, #8888 -->
|
<!-- TODO: link [running] to build process page, #8888 -->
|
||||||
|
|
||||||
See [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md) for a detailed description of the algorithm.
|
See [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md) for a detailed description of the algorithm.
|
||||||
|
@ -58,23 +59,12 @@
|
||||||
|
|
||||||
- [store]{#gloss-store}
|
- [store]{#gloss-store}
|
||||||
|
|
||||||
A collection of store objects, with operations to manipulate that collection.
|
A collection of [store objects][store object], with operations to manipulate that collection.
|
||||||
See [Nix store](./store/index.md) for details.
|
See [Nix Store](./store/index.md) for details.
|
||||||
|
|
||||||
There are many types of stores.
|
There are many types of stores, see [Store Types](./store/types/index.md) for details.
|
||||||
See [`nix help-stores`](@docroot@/command-ref/new-cli/nix3-help-stores.md) for a complete list.
|
|
||||||
|
|
||||||
From the perspective of the location where Nix is invoked, the Nix store can be referred to _local_ or _remote_.
|
|
||||||
Only a [local store]{#gloss-local-store} exposes a location in the file system of the machine where Nix is invoked that allows access to store objects, typically `/nix/store`.
|
|
||||||
Local stores can be used for building [derivations](#derivation).
|
|
||||||
See [Local Store](@docroot@/command-ref/new-cli/nix3-help-stores.md#local-store) for details.
|
|
||||||
|
|
||||||
[store]: #gloss-store
|
[store]: #gloss-store
|
||||||
[local store]: #gloss-local-store
|
|
||||||
|
|
||||||
- [chroot store]{#gloss-chroot-store}
|
|
||||||
|
|
||||||
A [local store] whose canonical path is anything other than `/nix/store`.
|
|
||||||
|
|
||||||
- [binary cache]{#gloss-binary-cache}
|
- [binary cache]{#gloss-binary-cache}
|
||||||
|
|
||||||
|
@ -86,7 +76,7 @@
|
||||||
|
|
||||||
- [store path]{#gloss-store-path}
|
- [store path]{#gloss-store-path}
|
||||||
|
|
||||||
The location of a [store object](@docroot@/store/index.md#store-object) in the file system, i.e., an immediate child of the Nix store directory.
|
The location of a [store object] in the file system, i.e., an immediate child of the Nix store directory.
|
||||||
|
|
||||||
> **Example**
|
> **Example**
|
||||||
>
|
>
|
||||||
|
@ -96,7 +86,7 @@
|
||||||
|
|
||||||
[store path]: #gloss-store-path
|
[store path]: #gloss-store-path
|
||||||
|
|
||||||
- [file system object]{#gloss-store-object}
|
- [file system object]{#gloss-file-system-object}
|
||||||
|
|
||||||
The Nix data model for representing simplified file system data.
|
The Nix data model for representing simplified file system data.
|
||||||
|
|
||||||
|
@ -126,7 +116,7 @@
|
||||||
non-[fixed-output](#gloss-fixed-output-derivation)
|
non-[fixed-output](#gloss-fixed-output-derivation)
|
||||||
derivation.
|
derivation.
|
||||||
|
|
||||||
- [output-addressed store object]{#gloss-output-addressed-store-object}
|
- [content-addressed store object]{#gloss-content-addressed-store-object}
|
||||||
|
|
||||||
A [store object] whose [store path] is determined by its contents.
|
A [store object] whose [store path] is determined by its contents.
|
||||||
This includes derivations, the outputs of [content-addressed derivations](#gloss-content-addressed-derivation), and the outputs of [fixed-output derivations](#gloss-fixed-output-derivation).
|
This includes derivations, the outputs of [content-addressed derivations](#gloss-content-addressed-derivation), and the outputs of [fixed-output derivations](#gloss-fixed-output-derivation).
|
||||||
|
@ -155,6 +145,11 @@
|
||||||
builder can rely on external inputs such as the network or the
|
builder can rely on external inputs such as the network or the
|
||||||
system time) but the Nix model assumes it.
|
system time) but the Nix model assumes it.
|
||||||
|
|
||||||
|
- [impure derivation]{#gloss-impure-derivation}
|
||||||
|
|
||||||
|
[An experimental feature](@docroot@/contributing/experimental-features.md#xp-feature-impure-derivations) that allows derivations to be explicitly marked as impure,
|
||||||
|
so that they are always rebuilt, and their outputs not reused by subsequent calls to realise them.
|
||||||
|
|
||||||
- [Nix database]{#gloss-nix-database}
|
- [Nix database]{#gloss-nix-database}
|
||||||
|
|
||||||
An SQLite database to track [reference]s between [store object]s.
|
An SQLite database to track [reference]s between [store object]s.
|
||||||
|
@ -166,11 +161,13 @@
|
||||||
|
|
||||||
- [Nix expression]{#gloss-nix-expression}
|
- [Nix expression]{#gloss-nix-expression}
|
||||||
|
|
||||||
A high-level description of software packages and compositions
|
1. Commonly, a high-level description of software packages and compositions
|
||||||
thereof. Deploying software using Nix entails writing Nix
|
thereof. Deploying software using Nix entails writing Nix
|
||||||
expressions for your packages. Nix expressions are translated to
|
expressions for your packages. Nix expressions specify [derivations][derivation],
|
||||||
derivations that are stored in the Nix store. These derivations can
|
which are [instantiated][instantiate] into the Nix store as [store derivations][store derivation].
|
||||||
then be built.
|
These derivations can then be [realised][realise] to produce [outputs][output].
|
||||||
|
|
||||||
|
2. A syntactically valid use of the [Nix language]. For example, the contents of a `.nix` file form an expression.
|
||||||
|
|
||||||
- [reference]{#gloss-reference}
|
- [reference]{#gloss-reference}
|
||||||
|
|
||||||
|
@ -222,6 +219,9 @@
|
||||||
|
|
||||||
The [store derivation] that produced an [output path].
|
The [store derivation] that produced an [output path].
|
||||||
|
|
||||||
|
The deriver for an output path can be queried with the `--deriver` option to
|
||||||
|
[`nix-store --query`](@docroot@/command-ref/nix-store/query.md).
|
||||||
|
|
||||||
- [validity]{#gloss-validity}
|
- [validity]{#gloss-validity}
|
||||||
|
|
||||||
A store path is valid if all [store object]s in its [closure] can be read from the [store].
|
A store path is valid if all [store object]s in its [closure] can be read from the [store].
|
||||||
|
@ -232,6 +232,7 @@
|
||||||
- All paths in the store path's [closure] are valid.
|
- All paths in the store path's [closure] are valid.
|
||||||
|
|
||||||
[validity]: #gloss-validity
|
[validity]: #gloss-validity
|
||||||
|
[local store]: @docroot@/store/types/local-store.md
|
||||||
|
|
||||||
- [user environment]{#gloss-user-env}
|
- [user environment]{#gloss-user-env}
|
||||||
|
|
||||||
|
@ -266,6 +267,21 @@
|
||||||
|
|
||||||
The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute.
|
The epsilon symbol. In the context of a package, this means the version is empty. More precisely, the derivation does not have a version attribute.
|
||||||
|
|
||||||
|
- [package]{#package}
|
||||||
|
|
||||||
|
1. A software package; a collection of files and other data.
|
||||||
|
|
||||||
|
2. A [package attribute set].
|
||||||
|
|
||||||
|
- [package attribute set]{#package-attribute-set}
|
||||||
|
|
||||||
|
An [attribute set](@docroot@/language/values.md#attribute-set) containing the attribute `type = "derivation";` (derivation for historical reasons), as well as other attributes, such as
|
||||||
|
- attributes that refer to the files of a [package], typically in the form of [derivation outputs](#output),
|
||||||
|
- attributes that declare something about how the package is supposed to be installed or used,
|
||||||
|
- other metadata or arbitrary attributes.
|
||||||
|
|
||||||
|
[package attribute set]: #package-attribute-set
|
||||||
|
|
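A minimal sketch of such an attribute set (names and values are illustrative):

```nix
{
  type = "derivation";   # marks this value as a package attribute set
  name = "example-1.0";
  outputs = [ "out" ];
  meta = { description = "An example package"; };
}
```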
||||||
- [string interpolation]{#gloss-string-interpolation}
|
- [string interpolation]{#gloss-string-interpolation}
|
||||||
|
|
||||||
Expanding expressions enclosed in `${ }` within a [string], [path], or [attribute name].
|
Expanding expressions enclosed in `${ }` within a [string], [path], or [attribute name].
|
||||||
|
@ -282,3 +298,6 @@
|
||||||
These flags are enabled or disabled with the [`experimental-features`](./command-ref/conf-file.html#conf-experimental-features) setting.
|
These flags are enabled or disabled with the [`experimental-features`](./command-ref/conf-file.html#conf-experimental-features) setting.
|
||||||
|
|
||||||
See the contribution guide on the [purpose and lifecycle of experimental features](@docroot@/contributing/experimental-features.md).
|
See the contribution guide on the [purpose and lifecycle of experimental features](@docroot@/contributing/experimental-features.md).
|
||||||
|
|
||||||
|
|
||||||
|
[Nix language]: ./language/index.md
|
||||||
|
|
|
@ -1,26 +1,60 @@
|
||||||
# Installing a Binary Distribution
|
# Installing a Binary Distribution
|
||||||
|
|
||||||
The easiest way to install Nix is to run the following command:
|
To install the latest version of Nix, run the following command:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ curl -L https://nixos.org/nix/install | sh
|
$ curl -L https://nixos.org/nix/install | sh
|
||||||
```
|
```
|
||||||
|
|
||||||
This will run the installer interactively (causing it to explain what
|
This performs the default type of installation for your platform:
|
||||||
it is doing more explicitly), and perform the default "type" of install
|
|
||||||
for your platform:
|
|
||||||
- single-user on Linux
|
|
||||||
- multi-user on macOS
|
|
||||||
|
|
||||||
> **Notes on read-only filesystem root in macOS 10.15 Catalina +**
|
- [Multi-user](#multi-user-installation):
|
||||||
>
|
- Linux with systemd and without SELinux
|
||||||
> - It took some time to support this cleanly. You may see posts,
|
- macOS
|
||||||
> examples, and tutorials using obsolete workarounds.
|
- [Single-user](#single-user-installation):
|
||||||
> - Supporting it cleanly made macOS installs too complex to qualify
|
- Linux without systemd
|
||||||
> as single-user, so this type is no longer supported on macOS.
|
- Linux with SELinux
|
||||||
|
|
||||||
We recommend the multi-user install if it supports your platform and
|
We recommend the multi-user installation if it supports your platform and you can authenticate with `sudo`.
|
||||||
you can authenticate with `sudo`.
|
|
||||||
|
The installer can be configured with various command line arguments and environment variables.
|
||||||
|
To show available command line flags:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ curl -L https://nixos.org/nix/install | sh -s -- --help
|
||||||
|
```
|
||||||
|
|
||||||
|
To check what it does and how it can be customised further, [download and edit the second-stage installation script](#installing-from-a-binary-tarball).
|
||||||
|
|
||||||
|
# Installing a pinned Nix version from a URL
|
||||||
|
|
||||||
|
Version-specific installation URLs for all Nix versions since 1.11.16 can be found at [releases.nixos.org](https://releases.nixos.org/?prefix=nix/).
|
||||||
|
The directory for each version contains the corresponding SHA-256 hash.
|
||||||
|
|
||||||
|
All installation scripts are invoked the same way:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ export VERSION=2.19.2
|
||||||
|
$ curl -L https://releases.nixos.org/nix/nix-$VERSION/install | sh
|
||||||
|
```
|
||||||
|
|
||||||
|
# Multi User Installation
|
||||||
|
|
||||||
|
The multi-user Nix installation creates system users and a system service for the Nix daemon.
|
||||||
|
|
||||||
|
Supported systems:
|
||||||
|
|
||||||
|
- Linux running systemd, with SELinux disabled
|
||||||
|
- macOS
|
||||||
|
|
||||||
|
To explicitly instruct the installer to perform a multi-user installation on your system:
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ curl -L https://nixos.org/nix/install | sh -s -- --daemon
|
||||||
|
```
|
||||||
|
|
||||||
|
You can run this under your usual user account or `root`.
|
||||||
|
The script will invoke `sudo` as needed.
|
||||||
|
|
||||||
# Single User Installation
|
# Single User Installation
|
||||||
|
|
||||||
|
@ -30,60 +64,48 @@ To explicitly select a single-user installation on your system:
|
||||||
$ curl -L https://nixos.org/nix/install | sh -s -- --no-daemon
|
$ curl -L https://nixos.org/nix/install | sh -s -- --no-daemon
|
||||||
```
|
```
|
||||||
|
|
||||||
This will perform a single-user installation of Nix, meaning that `/nix`
|
In a single-user installation, `/nix` is owned by the invoking user.
|
||||||
is owned by the invoking user. You can run this under your usual user
|
The script will invoke `sudo` to create `/nix` if it doesn’t already exist.
|
||||||
account or root. The script will invoke `sudo` to create `/nix`
|
If you don’t have `sudo`, manually create `/nix` as `root`:
|
||||||
if it doesn’t already exist. If you don’t have `sudo`, you should
|
|
||||||
manually create `/nix` first as root, e.g.:
|
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ mkdir /nix
|
$ su root
|
||||||
$ chown alice /nix
|
# mkdir /nix
|
||||||
|
# chown alice /nix
|
||||||
```
|
```
|
||||||
|
|
||||||
The install script will modify the first writable file from amongst
|
# Installing from a binary tarball
|
||||||
`.bash_profile`, `.bash_login` and `.profile` to source
|
|
||||||
`~/.nix-profile/etc/profile.d/nix.sh`. You can set the
|
|
||||||
`NIX_INSTALLER_NO_MODIFY_PROFILE` environment variable before executing
|
|
||||||
the install script to disable this behaviour.
|
|
||||||
|
|
||||||
# Multi User Installation
|
You can also download a binary tarball that contains Nix and all its dependencies:
|
||||||
|
- Choose a [version](https://releases.nixos.org/?prefix=nix/) and [system type](../contributing/hacking.md#platforms)
|
||||||
|
- Download and unpack the tarball
|
||||||
|
- Run the installer
|
||||||
|
|
||||||
The multi-user Nix installation creates system users, and a system
|
> **Example**
|
||||||
service for the Nix daemon.
|
|
||||||
|
|
||||||
**Supported Systems**
|
|
||||||
- Linux running systemd, with SELinux disabled
|
|
||||||
- macOS
|
|
||||||
|
|
||||||
You can instruct the installer to perform a multi-user installation on
|
|
||||||
your system:
|
|
||||||
|
|
||||||
```console
|
|
||||||
$ curl -L https://nixos.org/nix/install | sh -s -- --daemon
|
|
||||||
```
|
|
||||||
|
|
||||||
The multi-user installation of Nix will create build users between the
|
|
||||||
user IDs 30001 and 30032, and a group with the group ID 30000. You
|
|
||||||
can run this under your usual user account or root. The script
|
|
||||||
will invoke `sudo` as needed.
|
|
||||||
|
|
||||||
> **Note**
|
|
||||||
>
|
>
|
||||||
> If you need Nix to use a different group ID or user ID set, you will
|
> ```console
|
||||||
> have to download the tarball manually and [edit the install
|
> $ pushd $(mktemp -d)
|
||||||
> script](#installing-from-a-binary-tarball).
|
> $ export VERSION=2.19.2
|
||||||
|
> $ export SYSTEM=x86_64-linux
|
||||||
|
> $ curl -LO https://releases.nixos.org/nix/nix-$VERSION/nix-$VERSION-$SYSTEM.tar.xz
|
||||||
|
> $ tar xf nix-$VERSION-$SYSTEM.tar.xz
|
||||||
|
> $ cd nix-$VERSION-$SYSTEM
|
||||||
|
> $ ./install
|
||||||
|
> $ popd
|
||||||
|
> ```
|
||||||
|
|
||||||
The installer will modify `/etc/bashrc`, and `/etc/zshrc` if they exist.
|
The installer can be customised with the environment variables declared in the file named `install-multi-user`.
|
||||||
The installer will first back up these files with a `.backup-before-nix`
|
|
||||||
extension. The installer will also create `/etc/profile.d/nix.sh`.
|
## Native packages for Linux distributions
|
||||||
|
|
||||||
|
The Nix community maintains installers for some Linux distributions in their [native packaging format](https://nix-community.github.io/nix-installers/).
|
||||||
|
|
||||||
# macOS Installation
|
# macOS Installation
|
||||||
|
|
||||||
|
<!-- anchors to catch existing links -->
|
||||||
[]{#sect-macos-installation-change-store-prefix}[]{#sect-macos-installation-encrypted-volume}[]{#sect-macos-installation-symlink}[]{#sect-macos-installation-recommended-notes}
|
[]{#sect-macos-installation-change-store-prefix}[]{#sect-macos-installation-encrypted-volume}[]{#sect-macos-installation-symlink}[]{#sect-macos-installation-recommended-notes}
|
||||||
<!-- Note: anchors above to catch permalinks to old explanations -->
|
|
||||||
|
|
||||||
We believe we have ironed out how to cleanly support the read-only root
|
We believe we have ironed out how to cleanly support the read-only root file system
|
||||||
on modern macOS. New installs will do this automatically.
|
on modern macOS. New installs will do this automatically.
|
||||||
|
|
||||||
This section previously detailed the situation, options, and trade-offs,
|
This section previously detailed the situation, options, and trade-offs,
|
||||||
|
@ -126,33 +148,3 @@ this to run the installer, but it may help if you run into trouble:
|
||||||
boot process to avoid problems loading or restoring any programs that
|
boot process to avoid problems loading or restoring any programs that
|
||||||
need access to your Nix store
|
need access to your Nix store
|
||||||
|
|
||||||
# Installing a pinned Nix version from a URL
|
|
||||||
|
|
||||||
Version-specific installation URLs for all Nix versions
|
|
||||||
since 1.11.16 can be found at [releases.nixos.org](https://releases.nixos.org/?prefix=nix/).
|
|
||||||
The corresponding SHA-256 hash can be found in the directory for the given version.
|
|
||||||
|
|
||||||
These install scripts can be used the same as usual:
|
|
||||||
|
|
||||||
```console
|
|
||||||
$ curl -L https://releases.nixos.org/nix/nix-<version>/install | sh
|
|
||||||
```
|
|
||||||
|
|
||||||
# Installing from a binary tarball
|
|
||||||
|
|
||||||
You can also download a binary tarball that contains Nix and all its
|
|
||||||
dependencies. (This is what the install script at
|
|
||||||
<https://nixos.org/nix/install> does automatically.) You should unpack
|
|
||||||
it somewhere (e.g. in `/tmp`), and then run the script named `install`
|
|
||||||
inside the binary tarball:
|
|
||||||
|
|
||||||
```console
|
|
||||||
$ cd /tmp
|
|
||||||
$ tar xfj nix-1.8-x86_64-darwin.tar.bz2
|
|
||||||
$ cd nix-1.8-x86_64-darwin
|
|
||||||
$ ./install
|
|
||||||
```
|
|
||||||
|
|
||||||
If you need to edit the multi-user installation script to use different
|
|
||||||
group ID or a different user ID range, modify the variables set in the
|
|
||||||
file named `install-multi-user`.
|
|
||||||
|
|
|
@ -32,11 +32,15 @@
|
||||||
your distribution does not provide it, please install it from
|
your distribution does not provide it, please install it from
|
||||||
<http://www.sqlite.org/>.
|
<http://www.sqlite.org/>.
|
||||||
|
|
||||||
- The [Boehm garbage collector](http://www.hboehm.info/gc/) to reduce
|
- The [Boehm garbage collector (`bdw-gc`)](http://www.hboehm.info/gc/) to reduce
|
||||||
the evaluator’s memory consumption (optional). To enable it, install
|
the evaluator’s memory consumption (optional).
|
||||||
|
|
||||||
|
To enable it, install
|
||||||
`pkgconfig` and the Boehm garbage collector, and pass the flag
|
`pkgconfig` and the Boehm garbage collector, and pass the flag
|
||||||
`--enable-gc` to `configure`.
|
`--enable-gc` to `configure`.
|
||||||
|
|
||||||
|
For `bdw-gc` <= 8.2.4 Nix needs a [small patch](https://github.com/NixOS/nix/blob/ac4d2e7b857acdfeac35ac8a592bdecee2d29838/boehmgc-traceable_allocator-public.diff) to be applied.
|
||||||
|
|
||||||
- The `boost` library of version 1.66.0 or higher. It can be obtained
|
- The `boost` library of version 1.66.0 or higher. It can be obtained
|
||||||
from the official web site <https://www.boost.org/>.
|
from the official web site <https://www.boost.org/>.
|
||||||
|
|
||||||
|
@ -72,7 +76,7 @@
|
||||||
This is an optional dependency and can be disabled
|
This is an optional dependency and can be disabled
|
||||||
by providing a `--disable-cpuid` to the `configure` script.
|
by providing a `--disable-cpuid` to the `configure` script.
|
||||||
|
|
||||||
- Unless `./configure --disable-tests` is specified, GoogleTest (GTest) and
|
- Unless `./configure --disable-unit-tests` is specified, GoogleTest (GTest) and
|
||||||
RapidCheck are required, which are available at
|
RapidCheck are required, which are available at
|
||||||
<https://google.github.io/googletest/> and
|
<https://google.github.io/googletest/> and
|
||||||
<https://github.com/emil-e/rapidcheck> respectively.
|
<https://github.com/emil-e/rapidcheck> respectively.
|
||||||
|
|
|
@ -1,14 +1,40 @@
|
||||||
# Upgrading Nix
|
# Upgrading Nix
|
||||||
|
|
||||||
Multi-user Nix users on macOS can upgrade Nix by running: `sudo -i sh -c
|
> **Note**
|
||||||
'nix-channel --update &&
|
>
|
||||||
nix-env --install --attr nixpkgs.nix &&
|
> These upgrade instructions apply where Nix was installed following the [installation instructions in this manual](./index.md).
|
||||||
launchctl remove org.nixos.nix-daemon &&
|
|
||||||
launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist'`
|
|
||||||
|
|
||||||
Single-user installations of Nix should run this: `nix-channel --update;
|
Check which Nix version will be installed, for example from one of the [release channels](http://channels.nixos.org/) such as `nixpkgs-unstable`:
|
||||||
nix-env --install --attr nixpkgs.nix nixpkgs.cacert`
|
|
||||||
|
|
||||||
Multi-user Nix users on Linux should run this with sudo: `nix-channel
|
```console
|
||||||
--update; nix-env --install --attr nixpkgs.nix nixpkgs.cacert; systemctl
|
$ nix-shell -p nix -I nixpkgs=channel:nixpkgs-unstable --run "nix --version"
|
||||||
daemon-reload; systemctl restart nix-daemon`
|
nix (Nix) 2.18.1
|
||||||
|
```
|
||||||
|
|
||||||
|
> **Warning**
|
||||||
|
>
|
||||||
|
> Writing to the [local store](@docroot@/store/types/local-store.md) with a newer version of Nix, for example by building derivations with [`nix-build`](@docroot@/command-ref/nix-build.md) or [`nix-store --realise`](@docroot@/command-ref/nix-store/realise.md), may change the database schema!
|
||||||
|
> Reverting to an older version of Nix may therefore require purging the store database before it can be used.
|
||||||
|
|
||||||
|
## Linux multi-user
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ sudo su
|
||||||
|
# nix-env --install --file '<nixpkgs>' --attr nix cacert -I nixpkgs=channel:nixpkgs-unstable
|
||||||
|
# systemctl daemon-reload
|
||||||
|
# systemctl restart nix-daemon
|
||||||
|
```
|
||||||
|
|
||||||
|
## macOS multi-user
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ sudo nix-env --install --file '<nixpkgs>' --attr nix -I nixpkgs=channel:nixpkgs-unstable
|
||||||
|
$ sudo launchctl remove org.nixos.nix-daemon
|
||||||
|
$ sudo launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist
|
||||||
|
```
|
||||||
|
|
||||||
|
## Single-user all platforms
|
||||||
|
|
||||||
|
```console
|
||||||
|
$ nix-env --install --file '<nixpkgs>' --attr nix cacert -I nixpkgs=channel:nixpkgs-unstable
|
||||||
|
```
|
||||||
|
|
|
@ -188,9 +188,13 @@ Derivations can declare some infrequently used optional attributes.
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
The `outputHashAlgo` attribute specifies the hash algorithm used to
|
The `outputHash` attribute must be a string containing the hash in either hexadecimal or "nix32" encoding, or following the format for integrity metadata as defined by [SRI](https://www.w3.org/TR/SRI/).
|
||||||
compute the hash. It can currently be `"sha1"`, `"sha256"` or
|
The "nix32" encoding is an adaptation of base-32 encoding.
|
||||||
`"sha512"`.
|
The [`convertHash`](@docroot@/language/builtins.md#builtins-convertHash) function shows how to convert between different encodings, and the [`nix-hash` command](../command-ref/nix-hash.md) has information about obtaining the hash for some contents, as well as converting to and from encodings.
|
||||||
|
|
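For instance, a small sketch of converting a hash between encodings (this assumes a Nix version that provides `builtins.convertHash`):

```nix
let
  hex = builtins.hashString "sha256" "example contents";
in
builtins.convertHash {
  hash = hex;
  hashAlgo = "sha256";
  toHashFormat = "sri";   # other formats: "base16", "nix32", "base64"
}
```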
||||||
|
The `outputHashAlgo` attribute specifies the hash algorithm used to compute the hash.
|
||||||
|
It can currently be `"sha1"`, `"sha256"`, `"sha512"`, or `null`.
|
||||||
|
`outputHashAlgo` can only be `null` when `outputHash` follows the SRI format.
|
||||||
|
|
||||||
The `outputHashMode` attribute determines how the hash is computed.
|
The `outputHashMode` attribute determines how the hash is computed.
|
||||||
It must be one of the following two values:
|
It must be one of the following two values:
|
||||||
|
@ -209,11 +213,6 @@ Derivations can declare some infrequently used optional attributes.
|
||||||
this case, the output can be anything, including a directory
|
this case, the output can be anything, including a directory
|
||||||
tree.
|
tree.
|
||||||
|
|
||||||
The `outputHash` attribute, finally, must be a string containing
|
|
||||||
the hash in either hexadecimal or base-32 notation. (See the
|
|
||||||
[`nix-hash` command](../command-ref/nix-hash.md) for information
|
|
||||||
about converting to and from base-32 notation.)
|
|
||||||
|
|
||||||
- [`__contentAddressed`]{#adv-attr-__contentAddressed}
|
- [`__contentAddressed`]{#adv-attr-__contentAddressed}
|
||||||
> **Warning**
|
> **Warning**
|
||||||
> This attribute is part of an [experimental feature](@docroot@/contributing/experimental-features.md).
|
> This attribute is part of an [experimental feature](@docroot@/contributing/experimental-features.md).
|
||||||
|
@ -257,29 +256,18 @@ Derivations can declare some infrequently used optional attributes.
|
||||||
of the environment (typically, a few hundred kilobyte).
|
of the environment (typically, a few hundred kilobyte).
|
||||||
|
|
||||||
- [`preferLocalBuild`]{#adv-attr-preferLocalBuild}\
|
- [`preferLocalBuild`]{#adv-attr-preferLocalBuild}\
|
||||||
If this attribute is set to `true` and [distributed building is
|
If this attribute is set to `true` and [distributed building is enabled](@docroot@/command-ref/conf-file.md#conf-builders), then, if possible, the derivation will be built locally instead of being forwarded to a remote machine.
|
||||||
enabled](../advanced-topics/distributed-builds.md), then, if
|
This is useful for derivations that are cheapest to build locally.
|
||||||
possible, the derivation will be built locally instead of forwarded
|
|
||||||
to a remote machine. This is appropriate for trivial builders
|
|
||||||
where the cost of doing a download or remote build would exceed
|
|
||||||
the cost of building locally.
|
|
||||||
|
|
||||||
- [`allowSubstitutes`]{#adv-attr-allowSubstitutes}\
|
- [`allowSubstitutes`]{#adv-attr-allowSubstitutes}\
|
||||||
If this attribute is set to `false`, then Nix will always build this
|
If this attribute is set to `false`, then Nix will always build this derivation (locally or remotely); it will not try to substitute its outputs.
|
||||||
derivation; it will not try to substitute its outputs. This is
|
This is useful for derivations that are cheaper to build than to substitute.
|
||||||
useful for very trivial derivations (such as `writeText` in Nixpkgs)
|
|
||||||
that are cheaper to build than to substitute from a binary cache.
|
|
||||||
|
|
||||||
You may disable the effects of this attibute by enabling the
|
This attribute can be ignored by setting [`always-allow-substitutes`](@docroot@/command-ref/conf-file.md#conf-always-allow-substitutes) to `true`.
|
||||||
`always-allow-substitutes` configuration option in Nix.
|
|
||||||
|
|
||||||
> **Note**
|
> **Note**
|
||||||
>
|
>
|
||||||
> You need to have a builder configured which satisfies the
|
> If set to `false`, the [`builder`](./derivations.md#attr-builder) should be able to run on the system type specified in the [`system` attribute](./derivations.md#attr-system), since the derivation cannot be substituted.
|
||||||
> derivation’s `system` attribute, since the derivation cannot be
|
|
||||||
> substituted. Thus it is usually a good idea to align `system` with
|
|
||||||
> `builtins.currentSystem` when setting `allowSubstitutes` to
|
|
||||||
> `false`. For most trivial derivations this should be the case.
|
|
||||||
|
|
||||||
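For instance, a minimal sketch of a derivation that is cheaper to build locally than to substitute (all attribute values are illustrative):

```nix
derivation {
  name = "tiny-note";
  system = builtins.currentSystem;
  builder = "/bin/sh";   # assumes a shell is available at this path
  args = [ "-c" "echo hello > $out" ];
  preferLocalBuild = true;
  allowSubstitutes = false;
}
```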
- [`__structuredAttrs`]{#adv-attr-structuredAttrs}\
|
- [`__structuredAttrs`]{#adv-attr-structuredAttrs}\
|
||||||
If the special attribute `__structuredAttrs` is set to `true`, the other derivation
|
If the special attribute `__structuredAttrs` is set to `true`, the other derivation
|
||||||
|
|
|
@ -36,7 +36,7 @@ It outputs an attribute set, and produces a [store derivation] as a side effect
|
||||||
The system type on which the [`builder`](#attr-builder) executable is meant to be run.
|
The system type on which the [`builder`](#attr-builder) executable is meant to be run.
|
||||||
|
|
||||||
A necessary condition for Nix to build derivations locally is that the `system` attribute matches the current [`system` configuration option].
|
A necessary condition for Nix to build derivations locally is that the `system` attribute matches the current [`system` configuration option].
|
||||||
It can automatically [build on other platforms](../advanced-topics/distributed-builds.md) by forwarding build requests to other machines.
|
It can automatically [build on other platforms](@docroot@/language/derivations.md#attr-builder) by forwarding build requests to other machines.
|
||||||
|
|
||||||
[`system` configuration option]: @docroot@/command-ref/conf-file.md#conf-system
|
[`system` configuration option]: @docroot@/command-ref/conf-file.md#conf-system
|
||||||
|
|
||||||
|
@ -274,7 +274,7 @@ The [`builder`](#attr-builder) is executed as follows:
|
||||||
directory (typically, `/nix/store`).
|
directory (typically, `/nix/store`).
|
||||||
|
|
||||||
- `NIX_ATTRS_JSON_FILE` & `NIX_ATTRS_SH_FILE` if `__structuredAttrs`
|
- `NIX_ATTRS_JSON_FILE` & `NIX_ATTRS_SH_FILE` if `__structuredAttrs`
|
||||||
is set to `true` for the dervation. A detailed explanation of this
|
is set to `true` for the derivation. A detailed explanation of this
|
||||||
behavior can be found in the
|
behavior can be found in the
|
||||||
[section about structured attrs](./advanced-attributes.md#adv-attr-structuredAttrs).
|
[section about structured attrs](./advanced-attributes.md#adv-attr-structuredAttrs).
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,8 @@
|
||||||
# Import From Derivation
|
# Import From Derivation
|
||||||
|
|
||||||
The value of a Nix expression can depend on the contents of a [store object](@docroot@/glossary.md#gloss-store-object).
|
The value of a Nix expression can depend on the contents of a [store object].
|
||||||
|
|
||||||
|
[store object]: @docroot@/glossary.md#gloss-store-object
|
||||||
|
|
||||||
Passing an expression `expr` that evaluates to a [store path](@docroot@/glossary.md#gloss-store-path) to any built-in function which reads from the filesystem constitutes Import From Derivation (IFD):
|
Passing an expression `expr` that evaluates to a [store path](@docroot@/glossary.md#gloss-store-path) to any built-in function which reads from the filesystem constitutes Import From Derivation (IFD):
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,13 @@
|
||||||
# Nix Language
|
# Nix Language
|
||||||
|
|
||||||
The Nix language is designed for conveniently creating and composing *derivations* – precise descriptions of how contents of existing files are used to derive new files.
|
The Nix language is designed for conveniently creating and composing *derivations* – precise descriptions of how contents of existing files are used to derive new files.
|
||||||
It is:
|
|
||||||
|
> **Tip**
|
||||||
|
>
|
||||||
|
> These pages are written as a reference.
|
||||||
|
> If you are learning Nix, nix.dev has a good [introduction to the Nix language](https://nix.dev/tutorials/nix-language).
|
||||||
|
|
||||||
|
The language is:
|
||||||
|
|
||||||
- *domain-specific*
|
- *domain-specific*
|
||||||
|
|
||||||
|
@ -432,6 +438,32 @@ This is an incomplete overview of language features, by example.
|
||||||
|
|
||||||
</td>
|
</td>
|
||||||
</tr>
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>
|
||||||
|
|
||||||
|
`inherit pkgs src;`
|
||||||
|
|
||||||
|
</td>
|
||||||
|
<td>
|
||||||
|
|
||||||
|
Adds the variables to the current scope (attribute set or `let` binding).
|
||||||
|
Desugars to `pkgs = pkgs; src = src;`
|
||||||
|
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
<tr>
|
||||||
|
<td>
|
||||||
|
|
||||||
|
`inherit (pkgs) lib stdenv;`
|
||||||
|
|
||||||
|
</td>
|
||||||
|
<td>
|
||||||
|
|
||||||
|
Adds the attributes, from the attribute set in parentheses, to the current scope (attribute set or `let` binding).
|
||||||
|
Desugars to `lib = pkgs.lib; stdenv = pkgs.stdenv;`
|
||||||
|
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
<tr>
|
<tr>
|
||||||
<td>
|
<td>
|
||||||
|
|
||||||
|
|
|
@ -84,7 +84,7 @@ The `+` operator is overloaded to also work on strings and paths.
|
||||||
>
|
>
|
||||||
> *string* `+` *string*
|
> *string* `+` *string*
|
||||||
|
|
||||||
Concatenate two [string]s and merge their string contexts.
|
Concatenate two [strings][string] and merge their string contexts.
|
||||||
|
|
||||||
[String concatenation]: #string-concatenation
|
[String concatenation]: #string-concatenation
|
||||||
|
|
||||||
|
@ -94,7 +94,7 @@ Concatenate two [string]s and merge their string contexts.
|
||||||
>
|
>
|
||||||
> *path* `+` *path*
|
> *path* `+` *path*
|
||||||
|
|
||||||
Concatenate two [path]s.
|
Concatenate two [paths][path].
|
||||||
The result is a path.
|
The result is a path.
|
||||||
|
|
||||||
[Path concatenation]: #path-concatenation
|
[Path concatenation]: #path-concatenation
|
||||||
|
@ -150,9 +150,9 @@ If an attribute name is present in both, the attribute value from the latter is
|
||||||
|
|
||||||
Comparison is
|
Comparison is
|
||||||
|
|
||||||
- [arithmetic] for [number]s
|
- [arithmetic] for [numbers][number]
|
||||||
- lexicographic for [string]s and [path]s
|
- lexicographic for [strings][string] and [paths][path]
|
||||||
- item-wise lexicographic for [list]s:
|
- item-wise lexicographic for [lists][list]:
|
||||||
elements at the same index in both lists are compared according to their type and skipped if they are equal.
|
elements at the same index in both lists are compared according to their type and skipped if they are equal.
|
||||||
|
|
||||||
All comparison operators are implemented in terms of `<`, and the following equivalencies hold:
|
All comparison operators are implemented in terms of `<`, and the following equivalencies hold:
|
||||||
|
@ -163,12 +163,12 @@ All comparison operators are implemented in terms of `<`, and the following equi
|
||||||
| *a* `>` *b* | *b* `<` *a* |
|
| *a* `>` *b* | *b* `<` *a* |
|
||||||
| *a* `>=` *b* | `! (` *a* `<` *b* `)` |
|
| *a* `>=` *b* | `! (` *a* `<` *b* `)` |
|
||||||
|
|
||||||
[Comparison]: #comparison-operators
|
[Comparison]: #comparison
|
||||||
|
|
||||||
## Equality
|
## Equality
|
||||||
|
|
||||||
- [Attribute sets][attribute set] and [list]s are compared recursively, and therefore are fully evaluated.
|
- [Attribute sets][attribute set] and [lists][list] are compared recursively, and therefore are fully evaluated.
|
||||||
- Comparison of [function]s always returns `false`.
|
- Comparison of [functions][function] always returns `false`.
|
||||||
- Numbers are type-compatible, see [arithmetic] operators.
|
- Numbers are type-compatible, see [arithmetic] operators.
|
||||||
- Floating point numbers only differ up to a limited precision.
|
- Floating point numbers only differ up to a limited precision.
|
||||||
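A few concrete evaluations illustrating the rules above (these can be checked in `nix repl`):

```nix
# Comparison and equality, following the rules above:
[
  ([ 1 2 ] < [ 1 3 ])   # true: item-wise lexicographic comparison of lists
  ("abc" < "abd")       # true: lexicographic comparison of strings
  (1 == 1.0)            # true: integers and floats are type-compatible
  ((x: x) == (x: x))    # false: comparing functions always returns false
]
```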
|
|
||||||
|
|
|
@ -20,6 +20,8 @@ Rather than writing
|
||||||
|
|
||||||
(where `freetype` is a [derivation]), you can instead write
|
(where `freetype` is a [derivation]), you can instead write
|
||||||
|
|
||||||
|
[derivation]: ../glossary.md#gloss-derivation
|
||||||
|
|
||||||
```nix
|
```nix
|
||||||
"--with-freetype2-library=${freetype}/lib"
|
"--with-freetype2-library=${freetype}/lib"
|
||||||
```
|
```
|
||||||
|
@ -189,7 +191,7 @@ If neither is present, an error is thrown.
|
||||||
> "${a}"
|
> "${a}"
|
||||||
> ```
|
> ```
|
||||||
>
|
>
|
||||||
> error: cannot coerce a set to a string
|
> error: cannot coerce a set to a string: { }
|
||||||
>
|
>
|
||||||
> at «string»:4:2:
|
> at «string»:4:2:
|
||||||
>
|
>
|
||||||
|
|
|
@ -156,6 +156,8 @@ function and the fifth being a set.
|
||||||
|
|
||||||
Note that lists are only lazy in values, and they are strict in length.
|
Note that lists are only lazy in values, and they are strict in length.
|
||||||
|
|
||||||
|
Elements in a list can be accessed using [`builtins.elemAt`](./builtins.md#builtins-elemAt).
|
||||||
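For example (a minimal illustration; indices are zero-based):

```nix
builtins.elemAt [ 10 20 30 ] 1   # evaluates to 20
```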
|
|
||||||
## Attribute Set
|
## Attribute Set
|
||||||
|
|
||||||
An attribute set is a collection of name-value-pairs (called *attributes*) enclosed in curly brackets (`{ }`).
|
An attribute set is a collection of name-value-pairs (called *attributes*) enclosed in curly brackets (`{ }`).
|
||||||
|
|
71
doc/manual/src/protocols/json/derivation.md
Normal file
71
doc/manual/src/protocols/json/derivation.md
Normal file
|
@ -0,0 +1,71 @@
|
||||||
|
# Derivation JSON Format
|
||||||
|
|
||||||
|
> **Warning**
|
||||||
|
>
|
||||||
|
> This JSON format is currently
|
||||||
|
> [**experimental**](@docroot@/contributing/experimental-features.md#xp-feature-nix-command)
|
||||||
|
> and subject to change.
|
||||||
|
|
||||||
|
The JSON serialization of a
|
||||||
|
[derivation](@docroot@/glossary.md#gloss-store-derivation)
|
||||||
|
is a JSON object with the following fields:
|
||||||
|
|
||||||
|
* `name`:
|
||||||
|
The name of the derivation.
|
||||||
|
This is used when calculating the store paths of the derivation's outputs.
|
||||||
|
|
||||||
|
* `outputs`:
|
||||||
|
Information about the output paths of the derivation.
|
||||||
|
This is a JSON object with one member per output, where the key is the output name and the value is a JSON object with these fields:
|
||||||
|
|
||||||
|
* `path`: The output path.
|
||||||
|
|
||||||
|
* `hashAlgo`:
|
||||||
|
For fixed-output derivations, the hashing algorithm (e.g. `sha256`), optionally prefixed by `r:` if `hash` denotes a NAR hash rather than a flat file hash.
|
||||||
|
|
||||||
|
* `hash`:
|
||||||
|
For fixed-output derivations, the expected content hash in base-16.
|
||||||
|
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```json
|
||||||
|
> "outputs": {
|
||||||
|
> "out": {
|
||||||
|
> "path": "/nix/store/2543j7c6jn75blc3drf4g5vhb1rhdq29-source",
|
||||||
|
> "hashAlgo": "r:sha256",
|
||||||
|
> "hash": "6fc80dcc62179dbc12fc0b5881275898f93444833d21b89dfe5f7fbcbb1d0d62"
|
||||||
|
> }
|
||||||
|
> }
|
||||||
|
> ```
|
||||||
|
|
||||||
|
* `inputSrcs`:
|
||||||
|
A list of store paths on which this derivation depends.
|
||||||
|
|
||||||
|
* `inputDrvs`:
|
||||||
|
A JSON object specifying the derivations on which this derivation depends, and which outputs of those derivations it uses.
|
||||||
|
|
||||||
|
> **Example**
|
||||||
|
>
|
||||||
|
> ```json
|
||||||
|
> "inputDrvs": {
|
||||||
|
> "/nix/store/6lkh5yi7nlb7l6dr8fljlli5zfd9hq58-curl-7.73.0.drv": ["dev"],
|
||||||
|
> "/nix/store/fn3kgnfzl5dzym26j8g907gq3kbm8bfh-unzip-6.0.drv": ["out"]
|
||||||
|
> }
|
||||||
|
> ```
|
||||||
|
|
||||||
|
specifies that this derivation depends on the `dev` output of `curl`, and the `out` output of `unzip`.
|
||||||
|
|
||||||
|
* `system`:
|
||||||
|
The system type on which this derivation is to be built
|
||||||
|
(e.g. `x86_64-linux`).
|
||||||
|
|
||||||
|
* `builder`:
|
||||||
|
The absolute path of the program to be executed to run the build.
|
||||||
|
Typically this is the `bash` shell
|
||||||
|
(e.g. `/nix/store/r3j288vpmczbl500w6zz89gyfa4nr0b1-bash-4.4-p23/bin/bash`).
|
||||||
|
|
||||||
|
* `args`:
|
||||||
|
The command-line arguments passed to the `builder`.
|
||||||
|
|
||||||
|
* `env`:
|
||||||
|
The environment passed to the `builder`.
|
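For orientation only, here is a hypothetical fixed-output derivation written in the Nix language whose JSON serialization would populate fields like the ones above (the `outputHash*` attributes end up in the output's `hashAlgo`/`hash` fields; this is an illustrative sketch, not a canonical mapping):

```nix
derivation {
  name = "source";
  system = "x86_64-linux";
  builder = "/bin/sh";
  args = [ "-c" "..." ];            # placeholder build script, illustrative only
  outputHashMode = "recursive";      # serialized as the `r:` prefix of `hashAlgo`
  outputHashAlgo = "sha256";
  outputHash = "6fc80dcc62179dbc12fc0b5881275898f93444833d21b89dfe5f7fbcbb1d0d62";
}
```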
98
doc/manual/src/protocols/json/store-object-info.md
Normal file
98
doc/manual/src/protocols/json/store-object-info.md
Normal file
|
@ -0,0 +1,98 @@
|
||||||
|
# Store object info JSON format
|
||||||
|
|
||||||
|
> **Warning**
|
||||||
|
>
|
||||||
|
> This JSON format is currently
|
||||||
|
> [**experimental**](@docroot@/contributing/experimental-features.md#xp-feature-nix-command)
|
||||||
|
> and subject to change.
|
||||||
|
|
||||||
|
Info about a [store object](@docroot@/glossary.md#gloss-store-object).
|
||||||
|
|
||||||
|
* `path`:
|
||||||
|
|
||||||
|
[Store path][store path] to the given store object.
|
||||||
|
|
||||||
|
* `narHash`:
|
||||||
|
|
||||||
|
Hash of the [file system object] part of the store object when serialized as a [Nix Archive].
|
||||||
|
|
||||||
|
* `narSize`:
|
||||||
|
|
||||||
|
Size of the [file system object] part of the store object when serialized as a [Nix Archive].
|
||||||
|
|
||||||
|
* `references`:
|
||||||
|
|
||||||
|
An array of [store paths][store path], possibly including this one.
|
||||||
|
|
||||||
|
* `ca` (optional):
|
||||||
|
|
||||||
|
Content address of this store object's file system object, used to compute its store path.
|
||||||
|
|
||||||
|
[store path]: @docroot@/glossary.md#gloss-store-path
|
||||||
|
[file system object]: @docroot@/store/file-system-object.md
|
||||||
|
[Nix Archive]: @docroot@/glossary.md#gloss-nar
|
||||||
|
|
||||||
|
## Impure fields
|
||||||
|
|
||||||
|
These are not intrinsic properties of the store object.
|
||||||
|
In other words, the same store object residing in different stores could have different values for these properties.
|
||||||
|
|
||||||
|
* `deriver` (optional):
|
||||||
|
|
||||||
|
The path to the [derivation] from which this store object is produced.
|
||||||
|
|
||||||
|
[derivation]: @docroot@/glossary.md#gloss-store-derivation
|
||||||
|
|
||||||
|
* `registrationTime` (optional):
|
||||||
|
|
||||||
|
When this store object was added to the store.
|
||||||
|
|
||||||
|
* `ultimate` (optional):
|
||||||
|
|
||||||
|
Whether this store object is trusted because we built it ourselves, rather than substituted a build product from elsewhere.
|
||||||
|
|
||||||
|
* `signatures` (optional):
|
||||||
|
|
||||||
|
Signatures claiming that this store object is what it claims to be.
|
||||||
|
Not relevant for [content-addressed] store objects,
|
||||||
|
but useful for [input-addressed] store objects.
|
||||||
|
|
||||||
|
[content-addressed]: @docroot@/glossary.md#gloss-content-addressed-store-object
|
||||||
|
[input-addressed]: @docroot@/glossary.md#gloss-input-addressed-store-object
|
||||||
|
|
||||||
|
### `.narinfo` extra fields
|
||||||
|
|
||||||
|
This metadata is specific to the "binary cache" family of Nix store types.
|
||||||
|
This information is not intrinsic to the store object, but about how it is stored.
|
||||||
|
|
||||||
|
* `url`:
|
||||||
|
|
||||||
|
Where to download a compressed archive of the file system objects of this store object.
|
||||||
|
|
||||||
|
* `compression`:
|
||||||
|
|
||||||
|
The compression format that the archive is in.
|
||||||
|
|
||||||
|
* `fileHash`:
|
||||||
|
|
||||||
|
A digest for the compressed archive itself, as opposed to the data contained within.
|
||||||
|
|
||||||
|
* `fileSize`:
|
||||||
|
|
||||||
|
The size of the compressed archive itself.
|
||||||
|
|
||||||
|
## Computed closure fields
|
||||||
|
|
||||||
|
These fields are not stored at all, but computed by traversing the other fields across all the store objects in a [closure].
|
||||||
|
|
||||||
|
* `closureSize`:
|
||||||
|
|
||||||
|
The total size of the compressed archive itself for this object, and the compressed archive of every object in this object's [closure].
|
||||||
|
|
||||||
|
### `.narinfo` extra fields
|
||||||
|
|
||||||
|
* `closureSize`:
|
||||||
|
|
||||||
|
The total size of this store object and every other object in its [closure].
|
||||||
|
|
||||||
|
[closure]: @docroot@/glossary.md#gloss-closure
|
131
doc/manual/src/protocols/store-path.md
Normal file
131
doc/manual/src/protocols/store-path.md
Normal file
|
@ -0,0 +1,131 @@
|
||||||
|
# Complete Store Path Calculation
|
||||||
|
|
||||||
|
This is the complete specification for how store paths are calculated.
|
||||||
|
|
||||||
|
The format of this specification is close to [Extended Backus–Naur form](https://en.wikipedia.org/wiki/Extended_Backus%E2%80%93Naur_form), but must deviate for a few things such as hash functions which we treat as bidirectional for specification purposes.
|
||||||
|
|
||||||
|
Regular users do *not* need to know this information --- store paths can be treated as black boxes computed from the properties of the store objects they refer to.
|
||||||
|
But for those interested in exactly how Nix works, e.g. if they are reimplementing it, this information can be useful.
|
||||||
|
|
||||||
|
## Store path proper
|
||||||
|
|
||||||
|
```ebnf
|
||||||
|
store-path = store-dir "/" digest "-" name
|
||||||
|
```
|
||||||
|
where
|
||||||
|
|
||||||
|
- `name` = the name of the store object.
|
||||||
|
|
||||||
|
- `store-dir` = the [store directory](@docroot@/store/store-path.md#store-directory)
|
||||||
|
|
||||||
|
- `digest` = base-32 representation of the first 160 bits of a [SHA-256] hash of `fingerprint`
|
||||||
|
|
||||||
|
This is the hash part of the store path (see the example below).
|
||||||
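As a worked illustration, the example store path from the derivation JSON page above decomposes as follows (shown as a Nix string literal purely for concreteness):

```nix
# store-dir = "/nix/store"
# digest    = "2543j7c6jn75blc3drf4g5vhb1rhdq29"   (32 base-32 characters = 160 bits)
# name      = "source"
"/nix/store/2543j7c6jn75blc3drf4g5vhb1rhdq29-source"
```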
|
|
||||||
|
## Fingerprint
|
||||||
|
|
||||||
|
- ```ebnf
|
||||||
|
fingerprint = type ":" sha256 ":" inner-digest ":" store ":" name
|
||||||
|
```
|
||||||
|
|
||||||
|
Note that it includes the location of the store as well as the name to make sure that changes to either of those are reflected in the hash
|
||||||
|
(e.g. you won't get `/nix/store/<digest>-name1` and `/nix/store/<digest>-name2`, or `/gnu/store/<digest>-name1`, with equal hash parts).
|
||||||
|
|
||||||
|
- `type` = one of:
|
||||||
|
|
||||||
|
- ```ebnf
|
||||||
|
| "text" ( ":" store-path )*
|
||||||
|
```
|
||||||
|
|
||||||
|
for encoded derivations written to the store.
|
||||||
|
The optional trailing store paths are the references of the store object.
|
||||||
|
|
||||||
|
- ```ebnf
|
||||||
|
| "source" ( ":" store-path )*
|
||||||
|
```
|
||||||
|
|
||||||
|
For paths copied to the store and hashed via a [Nix Archive (NAR)] and [SHA-256][sha-256].
|
||||||
|
Just like in the text case, we can have the store objects referenced by their paths.
|
||||||
|
Additionally, we can have an optional `:self` label to denote self reference.
|
||||||
|
|
||||||
|
- ```ebnf
|
||||||
|
| "output:" id
|
||||||
|
```
|
||||||
|
|
||||||
|
For either the outputs built from derivations,
|
||||||
|
or paths copied to the store that are hashed as a single flat file, or with a hash algorithm other than [SHA-256][sha-256].
|
||||||
|
(content hashed as a NAR with [SHA-256][sha-256] uses the `source` type above instead; this case exists only for compatibility).
|
||||||
|
|
||||||
|
`id` is the name of the output (usually, "out").
|
||||||
|
For content-addressed store objects, `id` is always "out".
|
||||||
|
|
||||||
|
- `inner-digest` = base-16 representation of a SHA-256 hash of `inner-fingerprint`
|
||||||
|
|
||||||
|
## Inner fingerprint
|
||||||
|
|
||||||
|
- `inner-fingerprint` = one of the following based on `type`:
|
||||||
|
|
||||||
|
- if `type` = `"text:" ...`:
|
||||||
|
|
||||||
|
the string written to the resulting store path.
|
||||||
|
|
||||||
|
- if `type` = `"source:" ...`:
|
||||||
|
|
||||||
|
the hash of the [Nix Archive (NAR)] serialization of the [file system object](@docroot@/store/file-system-object.md) of the store object.
|
||||||
|
|
||||||
|
- if `type` = `"output:" id`:
|
||||||
|
|
||||||
|
- For input-addressed derivation outputs:
|
||||||
|
|
||||||
|
the [ATerm](@docroot@/protocols/derivation-aterm.md) serialization of the derivation modulo fixed output derivations.
|
||||||
|
|
||||||
|
- For content-addressed store paths:
|
||||||
|
|
||||||
|
```ebnf
|
||||||
|
"fixed:out:" rec algo ":" hash ":"
|
||||||
|
```
|
||||||
|
|
||||||
|
where
|
||||||
|
|
||||||
|
- `rec` = one of:
|
||||||
|
|
||||||
|
- ```ebnf
|
||||||
|
| ""
|
||||||
|
```
|
||||||
|
(empty string) for hashes of the flat (single file) serialization
|
||||||
|
|
||||||
|
- ```ebnf
|
||||||
|
| "r:"
|
||||||
|
```
|
||||||
|
for hashes of the [Nix Archive (NAR)] (arbitrary file system object) serialization
|
||||||
|
|
||||||
|
- ```ebnf
|
||||||
|
| "git:"
|
||||||
|
```
|
||||||
|
for hashes of the [Git blob/tree](https://git-scm.com/book/en/v2/Git-Internals-Git-Objects) [Merkle tree](https://en.wikipedia.org/wiki/Merkle_tree) format
|
||||||
|
|
||||||
|
- ```ebnf
|
||||||
|
algo = "md5" | "sha1" | "sha256"
|
||||||
|
```
|
||||||
|
|
||||||
|
- `hash` = base-16 representation of the path or flat hash of the contents of the path (or expected contents of the path for fixed-output derivations).
|
||||||
|
|
||||||
|
Note that `id` = `"out"`, regardless of the name part of the store path.
|
||||||
|
Also note that NAR + SHA-256 must not use this case, and instead must use the `type` = `"source:" ...` case.
|
||||||
|
|
||||||
|
[Nix Archive (NAR)]: @docroot@/glossary.md#gloss-NAR
|
||||||
|
[sha-256]: https://en.m.wikipedia.org/wiki/SHA-256
|
||||||
|
|
||||||
|
### Historical Note
|
||||||
|
|
||||||
|
The `type` = `"source:" ...` and `type` = `"output:out"` grammars technically overlap in purpose,
|
||||||
|
in that both can represent data hashed by its SHA-256 NAR serialization.
|
||||||
|
|
||||||
|
The original reason for this way of computing names was to prevent name collisions (for security).
|
||||||
|
For instance, the thinking was that it shouldn't be feasible to come up with a derivation whose output path collides with the path for a copied source.
|
||||||
|
The former would have an `inner-fingerprint` starting with `output:out:`, while the latter would have an `inner-fingerprint` starting with `source:`.
|
||||||
|
|
||||||
|
Since `64519cfd657d024ae6e2bb74cb21ad21b886fd2a` (2008), however, it was decided that separating derivation-produced vs manually-hashed content-addressed data like this was not useful.
|
||||||
|
Now, data that is content-addressed with SHA-256 + NAR-serialization always uses the `source:...` construction, regardless of how it was produced (manually or by derivation).
|
||||||
|
This allows freely switching between using [fixed-output derivations](@docroot@/glossary.md#gloss-fixed-output-derivation) for fetching, and fetching out-of-band and then manually adding.
|
||||||
|
It also removes the ambiguity from the grammar.
|
|
@ -1,99 +1,43 @@
|
||||||
# Quick Start
|
# Quick Start
|
||||||
|
|
||||||
This chapter is for impatient people who don't like reading
|
This chapter is for impatient people who don't like reading documentation.
|
||||||
documentation. For more in-depth information you are kindly referred
|
For more in-depth information you are kindly referred to subsequent chapters.
|
||||||
to subsequent chapters.
|
|
||||||
|
|
||||||
1. Install Nix by running the following:
|
1. Install Nix:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ curl -L https://nixos.org/nix/install | sh
|
$ curl -L https://nixos.org/nix/install | sh
|
||||||
```
|
```
|
||||||
|
|
||||||
The install script will use `sudo`, so make sure you have sufficient rights.
|
The install script will use `sudo`, so make sure you have sufficient rights.
|
||||||
On Linux, `--daemon` can be omitted for a single-user install.
|
|
||||||
|
|
||||||
For other installation methods, see [here](installation/index.md).
|
For other installation methods, see the detailed [installation instructions](installation/index.md).
|
||||||
|
|
||||||
1. See what installable packages are currently available in the
|
1. Run software without installing it permanently:
|
||||||
channel:
|
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env --query --available --attr-path
|
$ nix-shell --packages cowsay lolcat
|
||||||
nixpkgs.docbook_xml_dtd_43 docbook-xml-4.3
|
|
||||||
nixpkgs.docbook_xml_dtd_45 docbook-xml-4.5
|
|
||||||
nixpkgs.firefox firefox-33.0.2
|
|
||||||
nixpkgs.hello hello-2.9
|
|
||||||
nixpkgs.libxslt libxslt-1.1.28
|
|
||||||
…
|
|
||||||
```
|
```
|
||||||
|
|
||||||
1. Install some packages from the channel:
|
This downloads the specified packages with all their dependencies, and drops you into a Bash shell where the commands provided by those packages are present.
|
||||||
|
This will not affect your normal environment:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-env --install --attr nixpkgs.hello
|
[nix-shell:~]$ cowsay Hello, Nix! | lolcat
|
||||||
```
|
```
|
||||||
|
|
||||||
This should download pre-built packages; it should not build them
|
Exiting the shell will make the programs disappear again:
|
||||||
locally (if it does, something went wrong).
|
|
||||||
|
|
||||||
1. Test that they work:
|
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ which hello
|
|
||||||
/home/eelco/.nix-profile/bin/hello
|
|
||||||
$ hello
|
|
||||||
Hello, world!
|
|
||||||
```
|
|
||||||
|
|
||||||
1. Uninstall a package:
|
|
||||||
|
|
||||||
```console
|
|
||||||
$ nix-env --uninstall hello
|
|
||||||
```
|
|
||||||
|
|
||||||
1. You can also test a package without installing it:
|
|
||||||
|
|
||||||
```console
|
|
||||||
$ nix-shell --packages hello
|
|
||||||
```
|
|
||||||
|
|
||||||
This builds or downloads GNU Hello and its dependencies, then drops
|
|
||||||
you into a Bash shell where the `hello` command is present, all
|
|
||||||
without affecting your normal environment:
|
|
||||||
|
|
||||||
```console
|
|
||||||
[nix-shell:~]$ hello
|
|
||||||
Hello, world!
|
|
||||||
|
|
||||||
[nix-shell:~]$ exit
|
[nix-shell:~]$ exit
|
||||||
|
$ lolcat
|
||||||
$ hello
|
lolcat: command not found
|
||||||
hello: command not found
|
|
||||||
```
|
```
|
||||||
|
|
||||||
1. To keep up-to-date with the channel, do:
|
1. Search for more packages on [search.nixos.org](https://search.nixos.org/) to try them out.
|
||||||
|
|
||||||
|
1. Free up storage space:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
$ nix-channel --update nixpkgs
|
$ nix-collect-garbage
|
||||||
$ nix-env --upgrade '*'
|
|
||||||
```
|
|
||||||
|
|
||||||
The latter command will upgrade each installed package for which
|
|
||||||
there is a “newer” version (as determined by comparing the version
|
|
||||||
numbers).
|
|
||||||
|
|
||||||
1. If you're unhappy with the result of a `nix-env` action (e.g., an
|
|
||||||
upgraded package turned out not to work properly), you can go back:
|
|
||||||
|
|
||||||
```console
|
|
||||||
$ nix-env --rollback
|
|
||||||
```
|
|
||||||
|
|
||||||
1. You should periodically run the Nix garbage collector to get rid of
|
|
||||||
unused packages, since uninstalls or upgrades don't actually delete
|
|
||||||
them:
|
|
||||||
|
|
||||||
```console
|
|
||||||
$ nix-collect-garbage --delete-old
|
|
||||||
```
|
```
|
||||||
|
|
202
doc/manual/src/release-notes/rl-2.20.md
Normal file
202
doc/manual/src/release-notes/rl-2.20.md
Normal file
|
@ -0,0 +1,202 @@
|
||||||
|
# Release 2.20.0 (2024-01-29)
|
||||||
|
|
||||||
|
- Option `allowed-uris` can now match whole schemes in URIs without slashes [#9547](https://github.com/NixOS/nix/pull/9547)
|
||||||
|
|
||||||
|
If a scheme, such as `github:` is specified in the `allowed-uris` option, all URIs starting with `github:` are allowed.
|
||||||
|
Previously this only worked for schemes whose URIs used the `://` syntax.
|
||||||
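A hedged sketch of what this looks like in practice, assuming a NixOS system that manages `nix.conf` through the `nix.settings` module options (plain `nix.conf` users would write the corresponding `allowed-uris = …` line directly):

```nix
{
  # Permit a whole scheme (no `://` required) as well as an HTTPS prefix
  # when restrict-eval / pure evaluation checks allowed-uris.
  nix.settings.allowed-uris = [
    "github:"
    "https://github.com/NixOS/"
  ];
}
```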
|
|
||||||
|
- Include cgroup stats when building through the daemon [#9598](https://github.com/NixOS/nix/pull/9598)
|
||||||
|
|
||||||
|
Nix now also reports cgroup statistics when building through the Nix daemon and when doing remote builds using `ssh-ng`,
|
||||||
|
if both sides of the connection are using Nix 2.20 or newer.
|
||||||
|
|
||||||
|
- Disallow empty search regex in `nix search` [#9481](https://github.com/NixOS/nix/pull/9481)
|
||||||
|
|
||||||
|
[`nix search`](@docroot@/command-ref/new-cli/nix3-search.md) now requires a search regex to be passed. To show all packages, use `^`.
|
||||||
|
|
||||||
|
- Add new `eval-system` setting [#4093](https://github.com/NixOS/nix/pull/4093)
|
||||||
|
|
||||||
|
Add a new `eval-system` option.
|
||||||
|
Unlike `system`, it just overrides the value of `builtins.currentSystem`.
|
||||||
|
This is more useful than overriding `system`, because you can build these derivations on remote builders that support the given system.
|
||||||
|
In contrast, `system` also affects scheduling, which will cause Nix to build those derivations locally even if that doesn't make sense.
|
||||||
|
|
||||||
|
`eval-system` only takes effect if it is non-empty.
|
||||||
|
If empty (the default) `system` is used as before, so there is no breakage.
|
||||||
|
|
||||||
|
- Import-from-derivation builds the derivation in the build store [#9661](https://github.com/NixOS/nix/pull/9661)
|
||||||
|
|
||||||
|
When using `--eval-store`, `import`ing from a derivation will now result in the derivation being built on the build store, i.e. the store specified in the `store` Nix option.
|
||||||
|
|
||||||
|
Because the resulting Nix expression must be copied back to the evaluation store in order to be imported, this requires the evaluation store to trust the build store's signatures.
|
||||||
|
|
||||||
|
- Mounted SSH Store [#7890](https://github.com/NixOS/nix/issues/7890) [#7912](https://github.com/NixOS/nix/pull/7912)
|
||||||
|
|
||||||
|
Introduced the store [`mounted-ssh-ng://`](@docroot@/command-ref/new-cli/nix3-help-stores.md).
|
||||||
|
This store allows full access to a Nix store on a remote machine and additionally requires that the store be mounted in the local filesystem.
|
||||||
|
|
||||||
|
- Rename `nix show-config` to `nix config show` [#7672](https://github.com/NixOS/nix/issues/7672) [#9477](https://github.com/NixOS/nix/pull/9477)
|
||||||
|
|
||||||
|
`nix show-config` was renamed to `nix config show`, and `nix doctor` was renamed to `nix config check`, to be more consistent with the rest of the command line interface.
|
||||||
|
|
||||||
|
- Add command `nix hash convert` [#9452](https://github.com/NixOS/nix/pull/9452)
|
||||||
|
|
||||||
|
This replaces the old `nix hash to-*` commands, which are still available but will emit a deprecation warning. Please convert as follows:
|
||||||
|
|
||||||
|
- `nix hash to-base16 $hash1 $hash2`: Use `nix hash convert --to base16 $hash1 $hash2` instead.
|
||||||
|
- `nix hash to-base32 $hash1 $hash2`: Use `nix hash convert --to nix32 $hash1 $hash2` instead.
|
||||||
|
- `nix hash to-base64 $hash1 $hash2`: Use `nix hash convert --to base64 $hash1 $hash2` instead.
|
||||||
|
- `nix hash to-sri $hash1 $hash2`: Use `nix hash convert --to sri $hash1 $hash2` or even just `nix hash convert $hash1 $hash2` instead.
|
||||||
|
|
||||||
|
- Rename hash format `base32` to `nix32` [#9452](https://github.com/NixOS/nix/pull/9452)
|
||||||
|
|
||||||
|
Hash format `base32` was renamed to `nix32` since it used a special Nix-specific character set for
|
||||||
|
[Base32](https://en.wikipedia.org/wiki/Base32).
|
||||||
|
|
||||||
|
- `nix profile` now allows referring to elements by human-readable names [#8678](https://github.com/NixOS/nix/pull/8678)
|
||||||
|
|
||||||
|
[`nix profile`](@docroot@/command-ref/new-cli/nix3-profile.md) now uses names to refer to installed packages when running [`list`](@docroot@/command-ref/new-cli/nix3-profile-list.md), [`remove`](@docroot@/command-ref/new-cli/nix3-profile-remove.md) or [`upgrade`](@docroot@/command-ref/new-cli/nix3-profile-upgrade.md) as opposed to indices. Profile element names are generated when a package is installed and remain the same until the package is removed.
|
||||||
|
|
||||||
|
**Warning**: The `manifest.nix` file used to record the contents of profiles has changed. Nix will automatically upgrade profiles to the new version when you modify the profile. After that, the profile can no longer be used by older versions of Nix.
|
||||||
|
|
||||||
|
- Give `nix store add` a `--hash-algo` flag [#9809](https://github.com/NixOS/nix/pull/9809)
|
||||||
|
|
||||||
|
Adds a missing feature that was present in the old CLI, and matches our
|
||||||
|
plans to have similar flags for `nix hash convert` and `nix hash path`.
|
||||||
|
|
||||||
|
- Coercion errors include the failing value
|
||||||
|
|
||||||
|
The `error: cannot coerce a <TYPE> to a string` message now includes the value
|
||||||
|
which caused the error.
|
||||||
|
|
||||||
|
Before:
|
||||||
|
|
||||||
|
```
|
||||||
|
error: cannot coerce a set to a string
|
||||||
|
```
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
```
|
||||||
|
error: cannot coerce a set to a string: { aesSupport = «thunk»;
|
||||||
|
avx2Support = «thunk»; avx512Support = «thunk»; avxSupport = «thunk»;
|
||||||
|
canExecute = «thunk»; config = «thunk»; darwinArch = «thunk»; darwinMinVersion
|
||||||
|
= «thunk»; darwinMinVersionVariable = «thunk»; darwinPlatform = «thunk»; «84
|
||||||
|
attributes elided»}
|
||||||
|
```
|
||||||
|
|
||||||
|
- Type errors include the failing value
|
||||||
|
|
||||||
|
In errors like `value is an integer while a list was expected`, the message now
|
||||||
|
includes the failing value.
|
||||||
|
|
||||||
|
Before:
|
||||||
|
|
||||||
|
```
|
||||||
|
error: value is a set while a string was expected
|
||||||
|
```
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
```
|
||||||
|
error: expected a string but found a set: { ghc810 = «thunk»;
|
||||||
|
ghc8102Binary = «thunk»; ghc8107 = «thunk»; ghc8107Binary = «thunk»;
|
||||||
|
ghc865Binary = «thunk»; ghc90 = «thunk»; ghc902 = «thunk»; ghc92 = «thunk»;
|
||||||
|
ghc924Binary = «thunk»; ghc925 = «thunk»; «17 attributes elided»}
|
||||||
|
```
|
||||||
|
|
||||||
|
- Source locations are printed more consistently in errors [#561](https://github.com/NixOS/nix/issues/561) [#9555](https://github.com/NixOS/nix/pull/9555)
|
||||||
|
|
||||||
|
Source location information is now included in error messages more
|
||||||
|
consistently. Given this code:
|
||||||
|
|
||||||
|
```nix
|
||||||
|
let
|
||||||
|
attr = {foo = "bar";};
|
||||||
|
key = {};
|
||||||
|
in
|
||||||
|
attr.${key}
|
||||||
|
```
|
||||||
|
|
||||||
|
Previously, Nix would show this unhelpful message when attempting to evaluate
|
||||||
|
it:
|
||||||
|
|
||||||
|
```
|
||||||
|
error:
|
||||||
|
… while evaluating an attribute name
|
||||||
|
|
||||||
|
error: value is a set while a string was expected
|
||||||
|
```
|
||||||
|
|
||||||
|
Now, the error message displays where the problematic value was found:
|
||||||
|
|
||||||
|
```
|
||||||
|
error:
|
||||||
|
… while evaluating an attribute name
|
||||||
|
|
||||||
|
at bad.nix:4:11:
|
||||||
|
|
||||||
|
3| key = {};
|
||||||
|
4| in attr.${key}
|
||||||
|
| ^
|
||||||
|
5|
|
||||||
|
|
||||||
|
error: expected a string but found a set
|
||||||
|
```
|
||||||
|
|
||||||
|
- Some stack overflow segfaults are fixed [#9616](https://github.com/NixOS/nix/issues/9616) [#9617](https://github.com/NixOS/nix/pull/9617)
|
||||||
|
|
||||||
|
The number of nested function calls has been restricted, to detect and report
|
||||||
|
infinite function call recursions. The default maximum call depth is 10,000 and
|
||||||
|
can be set with [the `max-call-depth`
|
||||||
|
option](@docroot@/command-ref/conf-file.md#conf-max-call-depth).
|
||||||
|
|
||||||
|
This replaces the `stack overflow (possible infinite recursion)` message.
|
||||||
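For example, a deliberately unbounded recursion (a minimal sketch) now aborts with a call-depth error instead of crashing the evaluator:

```nix
# Each call adds an evaluator stack frame; after the configured
# max-call-depth (10,000 by default) evaluation is aborted with an error.
let f = n: f (n + 1); in f 0
```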
|
|
||||||
|
- Better error reporting for `with` expressions [#9658](https://github.com/NixOS/nix/pull/9658)
|
||||||
|
|
||||||
|
`with` expressions using non-attrset values to resolve variables are now reported with proper positions, e.g.
|
||||||
|
|
||||||
|
```
|
||||||
|
nix-repl> with 1; a
|
||||||
|
error:
|
||||||
|
… while evaluating the first subexpression of a with expression
|
||||||
|
at «string»:1:1:
|
||||||
|
1| with 1; a
|
||||||
|
| ^
|
||||||
|
|
||||||
|
error: expected a set but found an integer
|
||||||
|
```
|
||||||
|
|
||||||
|
- Functions are printed with more detail [#7145](https://github.com/NixOS/nix/issues/7145) [#9606](https://github.com/NixOS/nix/pull/9606)
|
||||||
|
|
||||||
|
`nix repl`, `nix eval`, `builtins.trace`, and most other places values are
|
||||||
|
printed will now include function names and source location information:
|
||||||
|
|
||||||
|
```
|
||||||
|
$ nix repl nixpkgs
|
||||||
|
nix-repl> builtins.map
|
||||||
|
«primop map»
|
||||||
|
|
||||||
|
nix-repl> builtins.map lib.id
|
||||||
|
«partially applied primop map»
|
||||||
|
|
||||||
|
nix-repl> builtins.trace lib.id "my-value"
|
||||||
|
trace: «lambda id @ /nix/store/8rrzq23h2zq7sv5l2vhw44kls5w0f654-source/lib/trivial.nix:26:5»
|
||||||
|
"my-value"
|
||||||
|
```
|
||||||
|
|
||||||
|
- Flake operations like `nix develop` will no longer fail when run in a Git
|
||||||
|
repository where the `flake.lock` file is `.gitignore`d
|
||||||
|
[#8854](https://github.com/NixOS/nix/issues/8854)
|
||||||
|
[#9324](https://github.com/NixOS/nix/pull/9324)
|
||||||
|
|
||||||
|
- Nix commands will now respect Ctrl-C
|
||||||
|
[#7145](https://github.com/NixOS/nix/issues/7145)
|
||||||
|
[#6995](https://github.com/NixOS/nix/pull/6995)
|
||||||
|
[#9687](https://github.com/NixOS/nix/pull/9687)
|
||||||
|
|
||||||
|
Previously, many Nix commands would hang indefinitely if Ctrl-C was pressed
|
||||||
|
while performing various operations (including `nix develop`, `nix flake
|
||||||
|
update`, and so on). With several fixes to Nix's signal handlers, Nix
|
||||||
|
commands will now exit quickly after Ctrl-C is pressed.
|
302
doc/manual/src/release-notes/rl-2.21.md
Normal file
302
doc/manual/src/release-notes/rl-2.21.md
Normal file
|
@ -0,0 +1,302 @@
|
||||||
|
# Release 2.21.0 (2024-03-11)
|
||||||
|
|
||||||
|
- Fix a fixed-output derivation sandbox escape (CVE-2024-27297)
|
||||||
|
|
||||||
|
Cooperating Nix derivations could send file descriptors to files in the Nix
|
||||||
|
store to each other via Unix domain sockets in the abstract namespace. This
|
||||||
|
allowed one derivation to modify the output of the other derivation, after Nix
|
||||||
|
has registered the path as "valid" and immutable in the Nix database.
|
||||||
|
In particular, this allowed the output of fixed-output derivations to be
|
||||||
|
modified from their expected content.
|
||||||
|
|
||||||
|
This isn't the case any more.
|
||||||
|
|
||||||
|
- CLI options `--arg-from-file` and `--arg-from-stdin` [#10122](https://github.com/NixOS/nix/pull/10122)
|
||||||
|
|
||||||
|
The new CLI option `--arg-from-file` *name* *path* passes the contents
|
||||||
|
of file *path* as a string value via the function argument *name* to a
|
||||||
|
Nix expression. Similarly, the new option `--arg-from-stdin` *name*
|
||||||
|
reads the contents of the string from standard input.
|
||||||
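As a hypothetical illustration, given a file `expr.nix` containing the function below, an invocation along the lines of `nix eval --file expr.nix --arg-from-file contents ./data.txt` would pass the text of `data.txt` as the `contents` argument (the file names here are made up for the example):

```nix
# expr.nix — receives the raw text of the file as a string
{ contents }:
builtins.stringLength contents
```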
|
|
||||||
|
- Concise error printing in `nix repl` [#9928](https://github.com/NixOS/nix/pull/9928)
|
||||||
|
|
||||||
|
Previously, if an element of a list or attribute set threw an error while
|
||||||
|
evaluating, `nix repl` would print the entire error (including source location
|
||||||
|
information) inline. This output was clumsy and difficult to parse:
|
||||||
|
|
||||||
|
```
|
||||||
|
nix-repl> { err = builtins.throw "uh oh!"; }
|
||||||
|
{ err = «error:
|
||||||
|
… while calling the 'throw' builtin
|
||||||
|
at «string»:1:9:
|
||||||
|
1| { err = builtins.throw "uh oh!"; }
|
||||||
|
| ^
|
||||||
|
|
||||||
|
error: uh oh!»; }
|
||||||
|
```
|
||||||
|
|
||||||
|
Now, only the error message is displayed, making the output much more readable.
|
||||||
|
```
|
||||||
|
nix-repl> { err = builtins.throw "uh oh!"; }
|
||||||
|
{ err = «error: uh oh!»; }
|
||||||
|
```
|
||||||
|
|
||||||
|
However, if the whole expression being evaluated throws an error, source
|
||||||
|
locations and (if applicable) a stack trace are printed, just like you'd expect:
|
||||||
|
|
||||||
|
```
|
||||||
|
nix-repl> builtins.throw "uh oh!"
|
||||||
|
error:
|
||||||
|
… while calling the 'throw' builtin
|
||||||
|
at «string»:1:1:
|
||||||
|
1| builtins.throw "uh oh!"
|
||||||
|
| ^
|
||||||
|
|
||||||
|
error: uh oh!
|
||||||
|
```
|
||||||
|
|
||||||
|
- `--debugger` can now access bindings from `let` expressions [#8827](https://github.com/NixOS/nix/issues/8827) [#9918](https://github.com/NixOS/nix/pull/9918)
|
||||||
|
|
||||||
|
Breakpoints and errors in the bindings of a `let` expression can now access
|
||||||
|
those bindings in the debugger. Previously, only the body of `let` expressions
|
||||||
|
could access those bindings.
|
||||||
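A small hypothetical example: when evaluated with `--debugger`, the debug REPL opened at the `builtins.break` call below can now inspect `greeting`, even though the breakpoint sits in a `let` binding rather than in the body:

```nix
let
  greeting = "hello";
  # Previously, a debugger entered here could not see `greeting`.
  message = builtins.break "${greeting}, world";
in
message
```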
|
|
||||||
|
- Enter the `--debugger` when `builtins.trace` is called if `debugger-on-trace` is set [#9914](https://github.com/NixOS/nix/pull/9914)
|
||||||
|
|
||||||
|
If the `debugger-on-trace` option is set and `--debugger` is given,
|
||||||
|
`builtins.trace` calls will behave similarly to `builtins.break` and will enter
|
||||||
|
the debug REPL. This is useful for determining where warnings are being emitted
|
||||||
|
from.
|
||||||
|
|
||||||
|
- Debugger prints source position information [#9913](https://github.com/NixOS/nix/pull/9913)
|
||||||
|
|
||||||
|
The `--debugger` now prints source location information, instead of the
|
||||||
|
pointers of source location information. Before:
|
||||||
|
|
||||||
|
```
|
||||||
|
nix-repl> :bt
|
||||||
|
0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
|
||||||
|
0x600001522598
|
||||||
|
```
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
```
|
||||||
|
0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
|
||||||
|
/nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27
|
||||||
|
|
||||||
|
131|
|
||||||
|
132| bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs;
|
||||||
|
| ^
|
||||||
|
133| in
|
||||||
|
```
|
||||||
|
|
||||||
|
- The `--debugger` will start more reliably in `let` expressions and function calls [#6649](https://github.com/NixOS/nix/issues/6649) [#9917](https://github.com/NixOS/nix/pull/9917)
|
||||||
|
|
||||||
|
Previously, if you attempted to evaluate this file with the debugger:
|
||||||
|
|
||||||
|
```nix
|
||||||
|
let
|
||||||
|
a = builtins.trace "before inner break" (
|
||||||
|
builtins.break "hello"
|
||||||
|
);
|
||||||
|
b = builtins.trace "before outer break" (
|
||||||
|
builtins.break a
|
||||||
|
);
|
||||||
|
in
|
||||||
|
b
|
||||||
|
```
|
||||||
|
|
||||||
|
Nix would correctly enter the debugger at `builtins.break a`, but if you asked
|
||||||
|
it to `:continue`, it would skip over the `builtins.break "hello"` expression
|
||||||
|
entirely.
|
||||||
|
|
||||||
|
Now, Nix will correctly enter the debugger at both breakpoints.
|
||||||
|
|
||||||
|
- Nested debuggers are no longer supported [#9920](https://github.com/NixOS/nix/pull/9920)
|
||||||
|
|
||||||
|
Previously, evaluating an expression that throws an error in the debugger would
|
||||||
|
enter a second, nested debugger:
|
||||||
|
|
||||||
|
```
|
||||||
|
nix-repl> builtins.throw "what"
|
||||||
|
error: what
|
||||||
|
|
||||||
|
|
||||||
|
Starting REPL to allow you to inspect the current state of the evaluator.
|
||||||
|
|
||||||
|
Welcome to Nix 2.18.1. Type :? for help.
|
||||||
|
|
||||||
|
nix-repl>
|
||||||
|
```
|
||||||
|
|
||||||
|
Now, it just prints the error message like `nix repl`:
|
||||||
|
|
||||||
|
```
|
||||||
|
nix-repl> builtins.throw "what"
|
||||||
|
error:
|
||||||
|
… while calling the 'throw' builtin
|
||||||
|
at «string»:1:1:
|
||||||
|
1| builtins.throw "what"
|
||||||
|
| ^
|
||||||
|
|
||||||
|
error: what
|
||||||
|
```
|
||||||
|
|
||||||
|
- Consistent order of function arguments in printed expressions [#9874](https://github.com/NixOS/nix/pull/9874)
|
||||||
|
|
||||||
|
Function arguments are now printed in lexicographic order rather than the internal, creation-time based symbol order.
|
||||||
|
|
||||||
|
- Fix duplicate attribute error positions for `inherit` [#9874](https://github.com/NixOS/nix/pull/9874)
|
||||||
|
|
||||||
|
When an `inherit` caused a duplicate attribute error the position of the error was not reported correctly, placing the error with the inherit itself or at the start of the bindings block instead of the offending attribute name.
|
||||||
|
|
||||||
|
- `inherit (x) ...` evaluates `x` only once [#9847](https://github.com/NixOS/nix/pull/9847)
|
||||||
|
|
||||||
|
`inherit (x) a b ...` now evaluates the expression `x` only once for all inherited attributes rather than once for each inherited attribute.
|
||||||
|
This does not usually have a measurable impact, but side-effects (such as `builtins.trace`) would be duplicated and expensive expressions (such as derivations) could cause a measurable slowdown.
|
||||||
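A small sketch showing the difference: when the attribute set below is evaluated strictly (for example with `nix-instantiate --eval --strict`), the trace message is now printed once rather than once per inherited attribute:

```nix
let
  attrs = builtins.trace "evaluating attrs" { a = 1; b = 2; };
in
{
  # `attrs` is evaluated only once for both `a` and `b`.
  inherit (attrs) a b;
}
```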
|
|
||||||
|
- Store paths are allowed to start with `.` [#912](https://github.com/NixOS/nix/issues/912) [#9091](https://github.com/NixOS/nix/pull/9091) [#9095](https://github.com/NixOS/nix/pull/9095) [#9120](https://github.com/NixOS/nix/pull/9120) [#9121](https://github.com/NixOS/nix/pull/9121) [#9122](https://github.com/NixOS/nix/pull/9122) [#9130](https://github.com/NixOS/nix/pull/9130) [#9219](https://github.com/NixOS/nix/pull/9219) [#9224](https://github.com/NixOS/nix/pull/9224) [#9867](https://github.com/NixOS/nix/pull/9867)
|
||||||
|
|
||||||
|
Leading periods were allowed by accident in Nix 2.4. The Nix team has considered this to be a bug, but this behavior has since been relied on by users, leading to unnecessary difficulties.
|
||||||
|
From now on, leading periods are supported. The names `.` and `..` are disallowed, as well as those starting with `.-` or `..-`.
|
||||||
|
|
||||||
|
Nix versions that denied leading periods are documented [in the issue](https://github.com/NixOS/nix/issues/912#issuecomment-1919583286).
|
||||||
|
|
||||||
|
- `nix repl` pretty-prints values [#9931](https://github.com/NixOS/nix/pull/9931)
|
||||||
|
|
||||||
|
`nix repl` will now pretty-print values:
|
||||||
|
|
||||||
|
```
|
||||||
|
{
|
||||||
|
attrs = {
|
||||||
|
a = {
|
||||||
|
b = {
|
||||||
|
c = { };
|
||||||
|
};
|
||||||
|
};
|
||||||
|
};
|
||||||
|
list = [ 1 ];
|
||||||
|
list' = [
|
||||||
|
1
|
||||||
|
2
|
||||||
|
3
|
||||||
|
];
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
- Introduction of `--regex` and `--all` in `nix profile remove` and `nix profile upgrade` [#10166](https://github.com/NixOS/nix/pull/10166)
|
||||||
|
|
||||||
|
Previously, the command-line arguments for `nix profile remove` and `nix profile upgrade` matched the package entries using regular expressions.
|
||||||
|
For instance:
|
||||||
|
|
||||||
|
```
|
||||||
|
nix profile remove '.*vim.*'
|
||||||
|
```
|
||||||
|
|
||||||
|
This would remove all packages that contain `vim` in their name.
|
||||||
|
|
||||||
|
In most cases, only singular package names were used to remove and upgrade packages. Mixing this with regular expressions sometimes led to unintended behavior. For instance, `python3.1` could match `python311`.
|
||||||
|
|
||||||
|
To avoid unintended behavior, the arguments now match exact names only.
|
||||||
|
|
||||||
|
Matching using regular expressions is still possible by using the new `--regex` flag:
|
||||||
|
|
||||||
|
```
|
||||||
|
nix profile remove --regex '.*vim.*'
|
||||||
|
```
|
||||||
|
|
||||||
|
One of the most useful cases for using regular expressions was to upgrade all packages. This was previously accomplished by:
|
||||||
|
|
||||||
|
```
|
||||||
|
nix profile upgrade '.*'
|
||||||
|
```
|
||||||
|
|
||||||
|
With the introduction of the `--all` flag, this now becomes more straightforward:
|
||||||
|
|
||||||
|
```
|
||||||
|
nix profile upgrade --all
|
||||||
|
```
|
||||||
|
|
||||||
|
- Visual clutter in `--debugger` is reduced [#9919](https://github.com/NixOS/nix/pull/9919)
|
||||||
|
|
||||||
|
Before:
|
||||||
|
```
|
||||||
|
info: breakpoint reached
|
||||||
|
|
||||||
|
|
||||||
|
Starting REPL to allow you to inspect the current state of the evaluator.
|
||||||
|
|
||||||
|
Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.
|
||||||
|
|
||||||
|
nix-repl> :continue
|
||||||
|
error: uh oh
|
||||||
|
|
||||||
|
|
||||||
|
Starting REPL to allow you to inspect the current state of the evaluator.
|
||||||
|
|
||||||
|
Welcome to Nix 2.20.0pre20231222_dirty. Type :? for help.
|
||||||
|
|
||||||
|
nix-repl>
|
||||||
|
```
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
```
|
||||||
|
info: breakpoint reached
|
||||||
|
|
||||||
|
Nix 2.20.0pre20231222_dirty debugger
|
||||||
|
Type :? for help.
|
||||||
|
nix-repl> :continue
|
||||||
|
error: uh oh
|
||||||
|
|
||||||
|
nix-repl>
|
||||||
|
```
|
||||||
|
|
||||||
|
- Cycle detection in `nix repl` is simpler and more reliable [#8672](https://github.com/NixOS/nix/issues/8672) [#9926](https://github.com/NixOS/nix/pull/9926)
|
||||||
|
|
||||||
|
The cycle detection in `nix repl`, `nix eval`, `builtins.trace`, and everywhere
|
||||||
|
else values are printed is now simpler and matches the cycle detection in
|
||||||
|
`nix-instantiate --eval` output.
|
||||||
|
|
||||||
|
Before:
|
||||||
|
|
||||||
|
```
|
||||||
|
nix eval --expr 'let self = { inherit self; }; in self'
|
||||||
|
{ self = { self = «repeated»; }; }
|
||||||
|
```
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
```
|
||||||
|
{ self = «repeated»; }
|
||||||
|
```
|
||||||
|
|
||||||
|
- In the debugger, `while evaluating the attribute` errors now include position information [#9915](https://github.com/NixOS/nix/pull/9915)
|
||||||
|
|
||||||
|
Before:
|
||||||
|
|
||||||
|
```
|
||||||
|
0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
|
||||||
|
0x600001522598
|
||||||
|
```
|
||||||
|
|
||||||
|
After:
|
||||||
|
|
||||||
|
```
|
||||||
|
0: while evaluating the attribute 'python311.pythonForBuild.pkgs'
|
||||||
|
/nix/store/hg65h51xnp74ikahns9hyf3py5mlbbqq-source/overrides/default.nix:132:27
|
||||||
|
|
||||||
|
131|
|
||||||
|
132| bootstrappingBase = pkgs.${self.python.pythonAttr}.pythonForBuild.pkgs;
|
||||||
|
| ^
|
||||||
|
133| in
|
||||||
|
```
|
||||||
|
|
||||||
|
- Stack size is increased on macOS [#9860](https://github.com/NixOS/nix/pull/9860)
|
||||||
|
|
||||||
|
Previously, Nix would set the stack size to 64MiB on Linux, but would leave the
|
||||||
|
stack size set to the default (approximately 8KiB) on macOS. Now, the stack
|
||||||
|
size is correctly set to 64MiB on macOS as well, which should reduce stack
|
||||||
|
overflow segfaults in deeply-recursive Nix expressions.
|
||||||
|
|
25
flake.lock
25
flake.lock
|
@ -32,34 +32,18 @@
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"lowdown-src": {
|
|
||||||
"flake": false,
|
|
||||||
"locked": {
|
|
||||||
"lastModified": 1633514407,
|
|
||||||
"narHash": "sha256-Dw32tiMjdK9t3ETl5fzGrutQTzh2rufgZV4A/BbxuD4=",
|
|
||||||
"owner": "kristapsdz",
|
|
||||||
"repo": "lowdown",
|
|
||||||
"rev": "d2c2b44ff6c27b936ec27358a2653caaef8f73b8",
|
|
||||||
"type": "github"
|
|
||||||
},
|
|
||||||
"original": {
|
|
||||||
"owner": "kristapsdz",
|
|
||||||
"repo": "lowdown",
|
|
||||||
"type": "github"
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"nixpkgs": {
|
"nixpkgs": {
|
||||||
"locked": {
|
"locked": {
|
||||||
"lastModified": 1700748986,
|
"lastModified": 1709083642,
|
||||||
"narHash": "sha256-/nqLrNU297h3PCw4QyDpZKZEUHmialJdZW2ceYFobds=",
|
"narHash": "sha256-7kkJQd4rZ+vFrzWu8sTRtta5D1kBG0LSRYAfhtmMlSo=",
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"rev": "9ba29e2346bc542e9909d1021e8fd7d4b3f64db0",
|
"rev": "b550fe4b4776908ac2a861124307045f8e717c8e",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
},
|
},
|
||||||
"original": {
|
"original": {
|
||||||
"owner": "NixOS",
|
"owner": "NixOS",
|
||||||
"ref": "nixos-23.05-small",
|
"ref": "release-23.11",
|
||||||
"repo": "nixpkgs",
|
"repo": "nixpkgs",
|
||||||
"type": "github"
|
"type": "github"
|
||||||
}
|
}
|
||||||
|
@ -84,7 +68,6 @@
|
||||||
"inputs": {
|
"inputs": {
|
||||||
"flake-compat": "flake-compat",
|
"flake-compat": "flake-compat",
|
||||||
"libgit2": "libgit2",
|
"libgit2": "libgit2",
|
||||||
"lowdown-src": "lowdown-src",
|
|
||||||
"nixpkgs": "nixpkgs",
|
"nixpkgs": "nixpkgs",
|
||||||
"nixpkgs-regression": "nixpkgs-regression"
|
"nixpkgs-regression": "nixpkgs-regression"
|
||||||
}
|
}
|
||||||
|
|
689
flake.nix
689
flake.nix
|
@ -6,22 +6,21 @@
|
||||||
|
|
||||||
description = "The purely functional package manager - but super!";
|
description = "The purely functional package manager - but super!";
|
||||||
|
|
||||||
inputs.nixpkgs.url = "github:NixOS/nixpkgs/nixos-23.05-small";
|
# TODO switch to nixos-23.11-small
|
||||||
|
# https://nixpk.gs/pr-tracker.html?pr=291954
|
||||||
|
inputs.nixpkgs.url = "github:NixOS/nixpkgs/release-23.11";
|
||||||
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
|
inputs.nixpkgs-regression.url = "github:NixOS/nixpkgs/215d4d0fd80ca5163643b03a33fde804a29cc1e2";
|
||||||
inputs.lowdown-src = { url = "github:kristapsdz/lowdown"; flake = false; };
|
|
||||||
inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
|
inputs.flake-compat = { url = "github:edolstra/flake-compat"; flake = false; };
|
||||||
inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; };
|
inputs.libgit2 = { url = "github:libgit2/libgit2"; flake = false; };
|
||||||
|
|
||||||
outputs = { self, nixpkgs, nixpkgs-regression, lowdown-src, flake-compat, libgit2 }:
|
outputs = { self, nixpkgs, nixpkgs-regression, libgit2, ... }:
|
||||||
|
|
||||||
let
|
let
|
||||||
inherit (nixpkgs) lib;
|
inherit (nixpkgs) lib;
|
||||||
|
inherit (lib) fileset;
|
||||||
|
|
||||||
officialRelease = false;
|
officialRelease = false;
|
||||||
|
|
||||||
# Set to true to build the release notes for the next release.
|
|
||||||
buildUnreleasedNotes = false;
|
|
||||||
|
|
||||||
version = lib.fileContents ./.version + versionSuffix;
|
version = lib.fileContents ./.version + versionSuffix;
|
||||||
versionSuffix =
|
versionSuffix =
|
||||||
if officialRelease
|
if officialRelease
|
||||||
|
@ -35,11 +34,25 @@
|
||||||
systems = linuxSystems ++ darwinSystems;
|
systems = linuxSystems ++ darwinSystems;
|
||||||
|
|
||||||
crossSystems = [
|
crossSystems = [
|
||||||
"armv6l-linux" "armv7l-linux"
|
"armv6l-unknown-linux-gnueabihf"
|
||||||
"x86_64-freebsd13" "x86_64-netbsd"
|
"armv7l-unknown-linux-gnueabihf"
|
||||||
|
"x86_64-unknown-netbsd"
|
||||||
];
|
];
|
||||||
|
|
||||||
stdenvs = [ "gccStdenv" "clangStdenv" "clang11Stdenv" "stdenv" "libcxxStdenv" "ccacheStdenv" ];
|
# Nix doesn't yet build on this platform, so we put it in a
|
||||||
|
# separate list. We just use this for `devShells` and
|
||||||
|
# `nixpkgsFor`, which this depends on.
|
||||||
|
shellCrossSystems = crossSystems ++ [
|
||||||
|
"x86_64-w64-mingw32"
|
||||||
|
];
|
||||||
|
|
||||||
|
stdenvs = [
|
||||||
|
"ccacheStdenv"
|
||||||
|
"clangStdenv"
|
||||||
|
"gccStdenv"
|
||||||
|
"libcxxStdenv"
|
||||||
|
"stdenv"
|
||||||
|
];
|
||||||
|
|
||||||
forAllSystems = lib.genAttrs systems;
|
forAllSystems = lib.genAttrs systems;
|
||||||
|
|
||||||
|
@ -54,57 +67,6 @@
|
||||||
})
|
})
|
||||||
stdenvs);
|
stdenvs);
|
||||||
|
|
||||||
# Experimental fileset library: https://github.com/NixOS/nixpkgs/pull/222981
|
|
||||||
# Not an "idiomatic" flake input because:
|
|
||||||
# - Propagation to dependent locks: https://github.com/NixOS/nix/issues/7730
|
|
||||||
# - Subflake would download redundant and huge parent flake
|
|
||||||
# - No git tree hash support: https://github.com/NixOS/nix/issues/6044
|
|
||||||
inherit (import (builtins.fetchTarball { url = "https://github.com/NixOS/nix/archive/1bdcd7fc8a6a40b2e805bad759b36e64e911036b.tar.gz"; sha256 = "sha256:14ljlpdsp4x7h1fkhbmc4bd3vsqnx8zdql4h3037wh09ad6a0893"; }))
|
|
||||||
fileset;
|
|
||||||
|
|
||||||
baseFiles =
|
|
||||||
# .gitignore has already been processed, so any changes in it are irrelevant
|
|
||||||
# at this point. It is not represented verbatim for test purposes because
|
|
||||||
# that would interfere with repo semantics.
|
|
||||||
fileset.fileFilter (f: f.name != ".gitignore") ./.;
|
|
||||||
|
|
||||||
configureFiles = fileset.unions [
|
|
||||||
./.version
|
|
||||||
./configure.ac
|
|
||||||
./m4
|
|
||||||
# TODO: do we really need README.md? It doesn't seem used in the build.
|
|
||||||
./README.md
|
|
||||||
];
|
|
||||||
|
|
||||||
topLevelBuildFiles = fileset.unions [
|
|
||||||
./local.mk
|
|
||||||
./Makefile
|
|
||||||
./Makefile.config.in
|
|
||||||
./mk
|
|
||||||
];
|
|
||||||
|
|
||||||
functionalTestFiles = fileset.unions [
|
|
||||||
./tests/functional
|
|
||||||
(fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts)
|
|
||||||
];
|
|
||||||
|
|
||||||
nixSrc = fileset.toSource {
|
|
||||||
root = ./.;
|
|
||||||
fileset = fileset.intersect baseFiles (fileset.unions [
|
|
||||||
configureFiles
|
|
||||||
topLevelBuildFiles
|
|
||||||
./boehmgc-coroutine-sp-fallback.diff
|
|
||||||
./doc
|
|
||||||
./misc
|
|
||||||
./precompiled-headers.h
|
|
||||||
./src
|
|
||||||
./tests/unit
|
|
||||||
./COPYING
|
|
||||||
./scripts/local.mk
|
|
||||||
functionalTestFiles
|
|
||||||
]);
|
|
||||||
};
|
|
||||||
|
|
||||||
# Memoize nixpkgs for different platforms for efficiency.
|
# Memoize nixpkgs for different platforms for efficiency.
|
||||||
nixpkgsFor = forAllSystems
|
nixpkgsFor = forAllSystems
|
||||||
(system: let
|
(system: let
|
||||||
|
@ -113,8 +75,8 @@
|
||||||
inherit system;
|
inherit system;
|
||||||
};
|
};
|
||||||
crossSystem = if crossSystem == null then null else {
|
crossSystem = if crossSystem == null then null else {
|
||||||
system = crossSystem;
|
config = crossSystem;
|
||||||
} // lib.optionalAttrs (crossSystem == "x86_64-freebsd13") {
|
} // lib.optionalAttrs (crossSystem == "x86_64-unknown-freebsd13") {
|
||||||
useLLVM = true;
|
useLLVM = true;
|
||||||
};
|
};
|
||||||
overlays = [
|
overlays = [
|
||||||
|
@ -126,17 +88,43 @@
|
||||||
in {
|
in {
|
||||||
inherit stdenvs native;
|
inherit stdenvs native;
|
||||||
static = native.pkgsStatic;
|
static = native.pkgsStatic;
|
||||||
cross = forAllCrossSystems (crossSystem: make-pkgs crossSystem "stdenv");
|
cross = lib.genAttrs shellCrossSystems (crossSystem: make-pkgs crossSystem "stdenv");
|
||||||
});
|
});
|
||||||
|
|
||||||
commonDeps =
|
installScriptFor = tarballs:
|
||||||
{ pkgs
|
nixpkgsFor.x86_64-linux.native.callPackage ./scripts/installer.nix {
|
||||||
, isStatic ? pkgs.stdenv.hostPlatform.isStatic
|
inherit tarballs;
|
||||||
}:
|
};
|
||||||
with pkgs; rec {
|
|
||||||
# Use "busybox-sandbox-shell" if present,
|
testNixVersions = pkgs: client: daemon:
|
||||||
# if not (legacy) fallback and hope it's sufficient.
|
pkgs.callPackage ./package.nix {
|
||||||
sh = pkgs.busybox-sandbox-shell or (busybox.override {
|
pname =
|
||||||
|
"nix-tests"
|
||||||
|
+ lib.optionalString
|
||||||
|
(lib.versionAtLeast daemon.version "2.4pre20211005" &&
|
||||||
|
lib.versionAtLeast client.version "2.4pre20211005")
|
||||||
|
"-${client.version}-against-${daemon.version}";
|
||||||
|
|
||||||
|
inherit fileset;
|
||||||
|
|
||||||
|
test-client = client;
|
||||||
|
test-daemon = daemon;
|
||||||
|
|
||||||
|
doBuild = false;
|
||||||
|
};
|
||||||
|
|
||||||
|
binaryTarball = nix: pkgs: pkgs.callPackage ./scripts/binary-tarball.nix {
|
||||||
|
inherit nix;
|
||||||
|
};
|
||||||
|
|
||||||
|
overlayFor = getStdenv: final: prev:
|
||||||
|
let
|
||||||
|
stdenv = getStdenv final;
|
||||||
|
in
|
||||||
|
{
|
||||||
|
nixStable = prev.nix;
|
||||||
|
|
||||||
|
default-busybox-sandbox-shell = final.busybox.override {
|
||||||
useMusl = true;
|
useMusl = true;
|
||||||
enableStatic = true;
|
enableStatic = true;
|
||||||
enableMinimal = true;
|
enableMinimal = true;
|
||||||
|
@ -158,375 +146,56 @@
|
||||||
CONFIG_ASH_PRINTF y
|
CONFIG_ASH_PRINTF y
|
||||||
CONFIG_ASH_TEST y
|
CONFIG_ASH_TEST y
|
||||||
'';
|
'';
|
||||||
});
|
};
|
||||||
|
|
||||||
configureFlags =
|
libgit2-nix = final.libgit2.overrideAttrs (attrs: {
|
||||||
lib.optionals stdenv.isLinux [
|
|
||||||
"--with-boost=${boost}/lib"
|
|
||||||
"--with-sandbox-shell=${sh}/bin/busybox"
|
|
||||||
]
|
|
||||||
++ lib.optionals (stdenv.isLinux && !(isStatic && stdenv.system == "aarch64-linux")) [
|
|
||||||
"LDFLAGS=-fuse-ld=gold"
|
|
||||||
];
|
|
||||||
|
|
||||||
testConfigureFlags = [
|
|
||||||
"RAPIDCHECK_HEADERS=${lib.getDev rapidcheck}/extras/gtest/include"
|
|
||||||
] ++ lib.optionals (stdenv.hostPlatform != stdenv.buildPlatform) [
|
|
||||||
"--enable-install-unit-tests"
|
|
||||||
"--with-check-bin-dir=${builtins.placeholder "check"}/bin"
|
|
||||||
"--with-check-lib-dir=${builtins.placeholder "check"}/lib"
|
|
||||||
];
|
|
||||||
|
|
||||||
internalApiDocsConfigureFlags = [
|
|
||||||
"--enable-internal-api-docs"
|
|
||||||
];
|
|
||||||
|
|
||||||
changelog-d = pkgs.buildPackages.changelog-d;
|
|
||||||
|
|
||||||
nativeBuildDeps =
|
|
||||||
[
|
|
||||||
buildPackages.bison
|
|
||||||
buildPackages.flex
|
|
||||||
(lib.getBin buildPackages.lowdown-nix)
|
|
||||||
buildPackages.mdbook
|
|
||||||
buildPackages.mdbook-linkcheck
|
|
||||||
buildPackages.autoconf-archive
|
|
||||||
buildPackages.autoreconfHook
|
|
||||||
buildPackages.pkg-config
|
|
||||||
|
|
||||||
# Tests
|
|
||||||
buildPackages.git
|
|
||||||
buildPackages.mercurial # FIXME: remove? only needed for tests
|
|
||||||
buildPackages.jq # Also for custom mdBook preprocessor.
|
|
||||||
buildPackages.openssh # only needed for tests (ssh-keygen)
|
|
||||||
]
|
|
||||||
++ lib.optionals stdenv.hostPlatform.isLinux [(buildPackages.util-linuxMinimal or buildPackages.utillinuxMinimal)]
|
|
||||||
# Official releases don't have rl-next, so we don't need to compile a changelog
|
|
||||||
++ lib.optional (!officialRelease && buildUnreleasedNotes) changelog-d
|
|
||||||
;
|
|
||||||
|
|
||||||
buildDeps =
|
|
||||||
[ curl
|
|
||||||
bzip2 xz brotli editline
|
|
||||||
openssl sqlite
|
|
||||||
libarchive
|
|
||||||
(pkgs.libgit2.overrideAttrs (attrs: {
|
|
||||||
src = libgit2;
|
src = libgit2;
|
||||||
version = libgit2.lastModifiedDate;
|
version = libgit2.lastModifiedDate;
|
||||||
cmakeFlags = (attrs.cmakeFlags or []) ++ ["-DUSE_SSH=exec"];
|
cmakeFlags = attrs.cmakeFlags or []
|
||||||
}))
|
++ [ "-DUSE_SSH=exec" ];
|
||||||
boost
|
|
||||||
lowdown-nix
|
|
||||||
libsodium
|
|
||||||
]
|
|
||||||
++ lib.optionals stdenv.isLinux [libseccomp]
|
|
||||||
++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid;
|
|
||||||
|
|
||||||
checkDeps = [
|
|
||||||
gtest
|
|
||||||
rapidcheck
|
|
||||||
];
|
|
||||||
|
|
||||||
internalApiDocsDeps = [
|
|
||||||
buildPackages.doxygen
|
|
||||||
];
|
|
||||||
|
|
||||||
awsDeps = lib.optional (stdenv.isLinux || stdenv.isDarwin)
|
|
||||||
(aws-sdk-cpp.override {
|
|
||||||
apis = ["s3" "transfer"];
|
|
||||||
customMemoryManagement = false;
|
|
||||||
});
|
});
|
||||||
|
|
||||||
propagatedDeps =
|
boehmgc-nix = (final.boehmgc.override {
|
||||||
[ ((boehmgc.override {
|
|
||||||
enableLargeConfig = true;
|
enableLargeConfig = true;
|
||||||
}).overrideAttrs(o: {
|
}).overrideAttrs(o: {
|
||||||
patches = (o.patches or []) ++ [
|
patches = (o.patches or []) ++ [
|
||||||
./boehmgc-coroutine-sp-fallback.diff
|
./dep-patches/boehmgc-coroutine-sp-fallback.diff
|
||||||
|
|
||||||
# https://github.com/ivmai/bdwgc/pull/586
|
# https://github.com/ivmai/bdwgc/pull/586
|
||||||
./boehmgc-traceable_allocator-public.diff
|
./dep-patches/boehmgc-traceable_allocator-public.diff
|
||||||
];
|
];
|
||||||
})
|
|
||||||
)
|
|
||||||
nlohmann_json
|
|
||||||
];
|
|
||||||
};
|
|
||||||
|
|
||||||
installScriptFor = systems:
|
|
||||||
with nixpkgsFor.x86_64-linux.native;
|
|
||||||
runCommand "installer-script"
|
|
||||||
{ buildInputs = [ nix ];
|
|
||||||
}
|
|
||||||
''
|
|
||||||
mkdir -p $out/nix-support
|
|
||||||
|
|
||||||
# Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix.
|
|
||||||
tarballPath() {
|
|
||||||
# Remove the store prefix
|
|
||||||
local path=''${1#${builtins.storeDir}/}
|
|
||||||
# Get the path relative to the derivation root
|
|
||||||
local rest=''${path#*/}
|
|
||||||
# Get the derivation hash
|
|
||||||
local drvHash=''${path%%-*}
|
|
||||||
echo "$drvHash/$rest"
|
|
||||||
}
|
|
||||||
|
|
||||||
substitute ${./scripts/install.in} $out/install \
|
|
||||||
${pkgs.lib.concatMapStrings
|
|
||||||
(system: let
|
|
||||||
tarball = if builtins.elem system crossSystems then self.hydraJobs.binaryTarballCross.x86_64-linux.${system} else self.hydraJobs.binaryTarball.${system};
|
|
||||||
in '' \
|
|
||||||
--replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \
|
|
||||||
--replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \
|
|
||||||
''
|
|
||||||
)
|
|
||||||
systems
|
|
||||||
} --replace '@nixVersion@' ${version}
|
|
||||||
|
|
||||||
echo "file installer $out/install" >> $out/nix-support/hydra-build-products
|
|
||||||
'';
|
|
||||||
|
|
||||||
testNixVersions = pkgs: client: daemon: with commonDeps { inherit pkgs; }; with pkgs.lib; pkgs.stdenv.mkDerivation {
|
|
||||||
NIX_DAEMON_PACKAGE = daemon;
|
|
||||||
NIX_CLIENT_PACKAGE = client;
|
|
||||||
name =
|
|
||||||
"nix-super-tests"
|
|
||||||
+ optionalString
|
|
||||||
(versionAtLeast daemon.version "2.4pre20211005" &&
|
|
||||||
versionAtLeast client.version "2.4pre20211005")
|
|
||||||
"-${client.version}-against-${daemon.version}";
|
|
||||||
inherit version;
|
|
||||||
|
|
||||||
src = fileset.toSource {
|
|
||||||
root = ./.;
|
|
||||||
fileset = fileset.intersect baseFiles (fileset.unions [
|
|
||||||
configureFiles
|
|
||||||
topLevelBuildFiles
|
|
||||||
functionalTestFiles
|
|
||||||
]);
|
|
||||||
};
|
|
||||||
|
|
||||||
VERSION_SUFFIX = versionSuffix;
|
|
||||||
|
|
||||||
nativeBuildInputs = nativeBuildDeps;
|
|
||||||
buildInputs = buildDeps ++ awsDeps ++ checkDeps;
|
|
||||||
propagatedBuildInputs = propagatedDeps;
|
|
||||||
|
|
||||||
enableParallelBuilding = true;
|
|
||||||
|
|
||||||
configureFlags =
|
|
||||||
testConfigureFlags # otherwise configure fails
|
|
||||||
++ [ "--disable-build" ];
|
|
||||||
dontBuild = true;
|
|
||||||
doInstallCheck = true;
|
|
||||||
|
|
||||||
installPhase = ''
|
|
||||||
mkdir -p $out
|
|
||||||
'';
|
|
||||||
|
|
||||||
installCheckPhase = ''
|
|
||||||
mkdir -p src/nix-channel
|
|
||||||
make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES
|
|
||||||
'';
|
|
||||||
};
|
|
||||||
|
|
||||||
binaryTarball = nix: pkgs:
|
|
||||||
let
|
|
||||||
inherit (pkgs) buildPackages;
|
|
||||||
inherit (pkgs) cacert;
|
|
||||||
installerClosureInfo = buildPackages.closureInfo { rootPaths = [ nix cacert ]; };
|
|
||||||
in
|
|
||||||
|
|
||||||
buildPackages.runCommand "nix-super-binary-tarball-${version}"
|
|
||||||
{ #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck;
|
|
||||||
meta.description = "Distribution-independent Nix bootstrap binaries for ${pkgs.system}";
|
|
||||||
}
|
|
||||||
''
|
|
||||||
cp ${installerClosureInfo}/registration $TMPDIR/reginfo
|
|
||||||
cp ${./scripts/create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
|
|
||||||
substitute ${./scripts/install-nix-from-closure.sh} $TMPDIR/install \
|
|
||||||
--subst-var-by nix ${nix} \
|
|
||||||
--subst-var-by cacert ${cacert}
|
|
||||||
|
|
||||||
substitute ${./scripts/install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
|
|
||||||
--subst-var-by nix ${nix} \
|
|
||||||
--subst-var-by cacert ${cacert}
|
|
||||||
substitute ${./scripts/install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
|
|
||||||
--subst-var-by nix ${nix} \
|
|
||||||
--subst-var-by cacert ${cacert}
|
|
||||||
substitute ${./scripts/install-multi-user.sh} $TMPDIR/install-multi-user \
|
|
||||||
--subst-var-by nix ${nix} \
|
|
||||||
--subst-var-by cacert ${cacert}
|
|
||||||
|
|
||||||
if type -p shellcheck; then
|
|
||||||
# SC1090: Don't worry about not being able to find
|
|
||||||
# $nix/etc/profile.d/nix.sh
|
|
||||||
shellcheck --exclude SC1090 $TMPDIR/install
|
|
||||||
shellcheck $TMPDIR/create-darwin-volume.sh
|
|
||||||
shellcheck $TMPDIR/install-darwin-multi-user.sh
|
|
||||||
shellcheck $TMPDIR/install-systemd-multi-user.sh
|
|
||||||
|
|
||||||
# SC1091: Don't panic about not being able to source
|
|
||||||
# /etc/profile
|
|
||||||
# SC2002: Ignore "useless cat" "error", when loading
|
|
||||||
# .reginfo, as the cat is a much cleaner
|
|
||||||
# implementation, even though it is "useless"
|
|
||||||
# SC2116: Allow ROOT_HOME=$(echo ~root) for resolving
|
|
||||||
# root's home directory
|
|
||||||
shellcheck --external-sources \
|
|
||||||
--exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user
|
|
||||||
fi
|
|
||||||
|
|
||||||
chmod +x $TMPDIR/install
|
|
||||||
chmod +x $TMPDIR/create-darwin-volume.sh
|
|
||||||
chmod +x $TMPDIR/install-darwin-multi-user.sh
|
|
||||||
chmod +x $TMPDIR/install-systemd-multi-user.sh
|
|
||||||
chmod +x $TMPDIR/install-multi-user
|
|
||||||
dir=nix-super-${version}-${pkgs.system}
|
|
||||||
fn=$out/$dir.tar.xz
|
|
||||||
mkdir -p $out/nix-support
|
|
||||||
echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
|
|
||||||
tar cvfJ $fn \
|
|
||||||
--owner=0 --group=0 --mode=u+rw,uga+r \
|
|
||||||
--mtime='1970-01-01' \
|
|
||||||
--absolute-names \
|
|
||||||
--hard-dereference \
|
|
||||||
--transform "s,$TMPDIR/install,$dir/install," \
|
|
||||||
--transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \
|
|
||||||
--transform "s,$TMPDIR/reginfo,$dir/.reginfo," \
|
|
||||||
--transform "s,$NIX_STORE,$dir/store,S" \
|
|
||||||
$TMPDIR/install \
|
|
||||||
$TMPDIR/create-darwin-volume.sh \
|
|
||||||
$TMPDIR/install-darwin-multi-user.sh \
|
|
||||||
$TMPDIR/install-systemd-multi-user.sh \
|
|
||||||
$TMPDIR/install-multi-user \
|
|
||||||
$TMPDIR/reginfo \
|
|
||||||
$(cat ${installerClosureInfo}/store-paths)
|
|
||||||
'';
|
|
||||||
|
|
||||||
overlayFor = getStdenv: final: prev:
|
|
||||||
let currentStdenv = getStdenv final; in
|
|
||||||
{
|
|
||||||
nixStable = prev.nix;
|
|
||||||
|
|
||||||
# Forward from the previous stage as we don’t want it to pick the lowdown override
|
|
||||||
nixUnstable = prev.nixUnstable;
|
|
||||||
|
|
||||||
nix =
|
|
||||||
with final;
|
|
||||||
with commonDeps {
|
|
||||||
inherit pkgs;
|
|
||||||
inherit (currentStdenv.hostPlatform) isStatic;
|
|
||||||
};
|
|
||||||
let
|
|
||||||
canRunInstalled = currentStdenv.buildPlatform.canExecute currentStdenv.hostPlatform;
|
|
||||||
in currentStdenv.mkDerivation (finalAttrs: {
|
|
||||||
name = "nix-super-${version}";
|
|
||||||
inherit version;
|
|
||||||
|
|
||||||
src = nixSrc;
|
|
||||||
VERSION_SUFFIX = versionSuffix;
|
|
||||||
|
|
||||||
outputs = [ "out" "dev" "doc" ]
|
|
||||||
++ lib.optional (currentStdenv.hostPlatform != currentStdenv.buildPlatform) "check";
|
|
||||||
|
|
||||||
nativeBuildInputs = nativeBuildDeps;
|
|
||||||
buildInputs = buildDeps
|
|
||||||
# There have been issues building these dependencies
|
|
||||||
++ lib.optionals (currentStdenv.hostPlatform == currentStdenv.buildPlatform) awsDeps
|
|
||||||
++ lib.optionals finalAttrs.doCheck checkDeps;
|
|
||||||
|
|
||||||
propagatedBuildInputs = propagatedDeps;
|
|
||||||
|
|
||||||
disallowedReferences = [ boost ];
|
|
||||||
|
|
||||||
preConfigure = lib.optionalString (! currentStdenv.hostPlatform.isStatic)
|
|
||||||
''
|
|
||||||
# Copy libboost_context so we don't get all of Boost in our closure.
|
|
||||||
# https://github.com/NixOS/nixpkgs/issues/45462
|
|
||||||
mkdir -p $out/lib
|
|
||||||
cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib
|
|
||||||
rm -f $out/lib/*.a
|
|
||||||
${lib.optionalString currentStdenv.hostPlatform.isLinux ''
|
|
||||||
chmod u+w $out/lib/*.so.*
|
|
||||||
patchelf --set-rpath $out/lib:${currentStdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
|
|
||||||
''}
|
|
||||||
${lib.optionalString currentStdenv.hostPlatform.isDarwin ''
|
|
||||||
for LIB in $out/lib/*.dylib; do
|
|
||||||
chmod u+w $LIB
|
|
||||||
install_name_tool -id $LIB $LIB
|
|
||||||
install_name_tool -delete_rpath ${boost}/lib/ $LIB || true
|
|
||||||
done
|
|
||||||
install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib
|
|
||||||
''}
|
|
||||||
'';
|
|
||||||
|
|
||||||
configureFlags = configureFlags ++
|
|
||||||
[ "--sysconfdir=/etc" ] ++
|
|
||||||
lib.optional stdenv.hostPlatform.isStatic "--enable-embedded-sandbox-shell" ++
|
|
||||||
[ (lib.enableFeature finalAttrs.doCheck "tests") ] ++
|
|
||||||
lib.optionals finalAttrs.doCheck testConfigureFlags ++
|
|
||||||
lib.optional (!canRunInstalled) "--disable-doc-gen";
|
|
||||||
|
|
||||||
enableParallelBuilding = true;
|
|
||||||
|
|
||||||
makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1";
|
|
||||||
|
|
||||||
doCheck = true;
|
|
||||||
|
|
||||||
installFlags = "sysconfdir=$(out)/etc";
|
|
||||||
|
|
||||||
postInstall = ''
|
|
||||||
mkdir -p $doc/nix-support
|
|
||||||
echo "doc manual $doc/share/doc/nix/manual" >> $doc/nix-support/hydra-build-products
|
|
||||||
${lib.optionalString currentStdenv.hostPlatform.isStatic ''
|
|
||||||
mkdir -p $out/nix-support
|
|
||||||
echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products
|
|
||||||
''}
|
|
||||||
${lib.optionalString currentStdenv.isDarwin ''
|
|
||||||
install_name_tool \
|
|
||||||
-change ${boost}/lib/libboost_context.dylib \
|
|
||||||
$out/lib/libboost_context.dylib \
|
|
||||||
$out/lib/libnixutil.dylib
|
|
||||||
''}
|
|
||||||
'';
|
|
||||||
|
|
||||||
doInstallCheck = finalAttrs.doCheck;
|
|
||||||
installCheckFlags = "sysconfdir=$(out)/etc";
|
|
||||||
installCheckTarget = "installcheck"; # work around buggy detection in stdenv
|
|
||||||
|
|
||||||
separateDebugInfo = !currentStdenv.hostPlatform.isStatic;
|
|
||||||
|
|
||||||
strictDeps = true;
|
|
||||||
|
|
||||||
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
|
|
||||||
|
|
||||||
passthru.perl-bindings = final.callPackage ./perl {
|
|
||||||
inherit fileset;
|
|
||||||
stdenv = currentStdenv;
|
|
||||||
};
|
|
||||||
|
|
||||||
meta.platforms = lib.platforms.unix;
|
|
||||||
meta.mainProgram = "nix";
|
|
||||||
});
|
});
|
||||||
|
|
||||||
lowdown-nix = with final; currentStdenv.mkDerivation rec {
|
changelog-d-nix = final.buildPackages.callPackage ./misc/changelog-d.nix { };
|
||||||
name = "lowdown-0.9.0";
|
|
||||||
|
|
||||||
src = lowdown-src;
|
nix =
|
||||||
|
let
|
||||||
|
officialRelease = false;
|
||||||
|
versionSuffix =
|
||||||
|
if officialRelease
|
||||||
|
then ""
|
||||||
|
else "pre${builtins.substring 0 8 (self.lastModifiedDate or self.lastModified or "19700101")}_${self.shortRev or "dirty"}";
|
||||||
|
|
||||||
outputs = [ "out" "bin" "dev" ];
|
in final.callPackage ./package.nix {
|
||||||
|
inherit
|
||||||
nativeBuildInputs = [ buildPackages.which ];
|
fileset
|
||||||
|
stdenv
|
||||||
configurePhase = ''
|
versionSuffix
|
||||||
${if (currentStdenv.isDarwin && currentStdenv.isAarch64) then "echo \"HAVE_SANDBOX_INIT=false\" > configure.local" else ""}
|
;
|
||||||
./configure \
|
officialRelease = false;
|
||||||
PREFIX=${placeholder "dev"} \
|
boehmgc = final.boehmgc-nix;
|
||||||
BINDIR=${placeholder "bin"}/bin
|
libgit2 = final.libgit2-nix;
|
||||||
'';
|
busybox-sandbox-shell = final.busybox-sandbox-shell or final.default-busybox-sandbox-shell;
|
||||||
|
} // {
|
||||||
|
# this is a proper separate downstream package, but it is put
|
||||||
|
# here as well for backwards-compatibility reasons.
|
||||||
|
perl-bindings = final.nix-perl-bindings;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
nix-perl-bindings = final.callPackage ./perl {
|
||||||
|
inherit fileset stdenv;
|
||||||
|
};
|
||||||
|
|
||||||
};
|
};
|
||||||
|
|
||||||
in {
|
in {
|
||||||
|
@ -539,19 +208,32 @@
|
||||||
# Binary package for various platforms.
|
# Binary package for various platforms.
|
||||||
build = forAllSystems (system: self.packages.${system}.nix);
|
build = forAllSystems (system: self.packages.${system}.nix);
|
||||||
|
|
||||||
|
shellInputs = forAllSystems (system: self.devShells.${system}.default.inputDerivation);
|
||||||
|
|
||||||
buildStatic = lib.genAttrs linux64BitSystems (system: self.packages.${system}.nix-static);
|
buildStatic = lib.genAttrs linux64BitSystems (system: self.packages.${system}.nix-static);
|
||||||
|
|
||||||
buildCross = forAllCrossSystems (crossSystem:
|
buildCross = forAllCrossSystems (crossSystem:
|
||||||
lib.genAttrs ["x86_64-linux"] (system: self.packages.${system}."nix-${crossSystem}"));
|
lib.genAttrs ["x86_64-linux"] (system: self.packages.${system}."nix-${crossSystem}"));
|
||||||
|
|
||||||
buildNoGc = forAllSystems (system: self.packages.${system}.nix.overrideAttrs (a: { configureFlags = (a.configureFlags or []) ++ ["--enable-gc=no"];}));
|
buildNoGc = forAllSystems (system:
|
||||||
|
self.packages.${system}.nix.override { enableGC = false; }
|
||||||
|
);
|
||||||
|
|
||||||
buildNoTests = forAllSystems (system:
|
buildNoTests = forAllSystems (system:
|
||||||
self.packages.${system}.nix.overrideAttrs (a: {
|
self.packages.${system}.nix.override {
|
||||||
doCheck =
|
doCheck = false;
|
||||||
assert ! a?dontCheck;
|
doInstallCheck = false;
|
||||||
false;
|
installUnitTests = false;
|
||||||
})
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
# Toggles some settings for better coverage. Windows needs these
|
||||||
|
# library combinations, and Debian builds Nix with GNU readline too.
|
||||||
|
buildReadlineNoMarkdown = forAllSystems (system:
|
||||||
|
self.packages.${system}.nix.override {
|
||||||
|
enableMarkdown = false;
|
||||||
|
readlineFlavor = "readline";
|
||||||
|
}
|
||||||
);
|
);
|
||||||
|
|
||||||
# Perl bindings for various platforms.
|
# Perl bindings for various platforms.
|
||||||
|
@ -572,66 +254,40 @@
|
||||||
# to https://nixos.org/nix/install. It downloads the binary
|
# to https://nixos.org/nix/install. It downloads the binary
|
||||||
# tarball for the user's system and calls the second half of the
|
# tarball for the user's system and calls the second half of the
|
||||||
# installation script.
|
# installation script.
|
||||||
installerScript = installScriptFor [ "x86_64-linux" "i686-linux" "aarch64-linux" "x86_64-darwin" "aarch64-darwin" "armv6l-linux" "armv7l-linux" ];
|
installerScript = installScriptFor [
|
||||||
installerScriptForGHA = installScriptFor [ "x86_64-linux" "x86_64-darwin" "armv6l-linux" "armv7l-linux"];
|
# Native
|
||||||
|
self.hydraJobs.binaryTarball."x86_64-linux"
|
||||||
|
self.hydraJobs.binaryTarball."i686-linux"
|
||||||
|
self.hydraJobs.binaryTarball."aarch64-linux"
|
||||||
|
self.hydraJobs.binaryTarball."x86_64-darwin"
|
||||||
|
self.hydraJobs.binaryTarball."aarch64-darwin"
|
||||||
|
# Cross
|
||||||
|
self.hydraJobs.binaryTarballCross."x86_64-linux"."armv6l-unknown-linux-gnueabihf"
|
||||||
|
self.hydraJobs.binaryTarballCross."x86_64-linux"."armv7l-unknown-linux-gnueabihf"
|
||||||
|
];
|
||||||
|
installerScriptForGHA = installScriptFor [
|
||||||
|
# Native
|
||||||
|
self.hydraJobs.binaryTarball."x86_64-linux"
|
||||||
|
self.hydraJobs.binaryTarball."x86_64-darwin"
|
||||||
|
# Cross
|
||||||
|
self.hydraJobs.binaryTarballCross."x86_64-linux"."armv6l-unknown-linux-gnueabihf"
|
||||||
|
self.hydraJobs.binaryTarballCross."x86_64-linux"."armv7l-unknown-linux-gnueabihf"
|
||||||
|
];
|
||||||
|
|
||||||
# docker image with Nix inside
|
# docker image with Nix inside
|
||||||
dockerImage = lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage);
|
dockerImage = lib.genAttrs linux64BitSystems (system: self.packages.${system}.dockerImage);
|
||||||
|
|
||||||
# Line coverage analysis.
|
# Line coverage analysis.
|
||||||
coverage =
|
coverage = nixpkgsFor.x86_64-linux.native.nix.override {
|
||||||
with nixpkgsFor.x86_64-linux.native;
|
pname = "nix-coverage";
|
||||||
with commonDeps { inherit pkgs; };
|
withCoverageChecks = true;
|
||||||
|
|
||||||
releaseTools.coverageAnalysis {
|
|
||||||
name = "nix-super-coverage-${version}";
|
|
||||||
|
|
||||||
src = nixSrc;
|
|
||||||
|
|
||||||
configureFlags = testConfigureFlags;
|
|
||||||
|
|
||||||
enableParallelBuilding = true;
|
|
||||||
|
|
||||||
nativeBuildInputs = nativeBuildDeps;
|
|
||||||
buildInputs = buildDeps ++ propagatedDeps ++ awsDeps ++ checkDeps;
|
|
||||||
|
|
||||||
dontInstall = false;
|
|
||||||
|
|
||||||
doInstallCheck = true;
|
|
||||||
installCheckTarget = "installcheck"; # work around buggy detection in stdenv
|
|
||||||
|
|
||||||
lcovFilter = [ "*/boost/*" "*-tab.*" ];
|
|
||||||
|
|
||||||
hardeningDisable = ["fortify"];
|
|
||||||
|
|
||||||
NIX_CFLAGS_COMPILE = "-DCOVERAGE=1";
|
|
||||||
};
|
};
|
||||||
|
|
||||||
# API docs for Nix's unstable internal C++ interfaces.
|
# API docs for Nix's unstable internal C++ interfaces.
|
||||||
internal-api-docs =
|
internal-api-docs = nixpkgsFor.x86_64-linux.native.callPackage ./package.nix {
|
||||||
with nixpkgsFor.x86_64-linux.native;
|
inherit fileset;
|
||||||
with commonDeps { inherit pkgs; };
|
doBuild = false;
|
||||||
|
enableInternalAPIDocs = true;
|
||||||
stdenv.mkDerivation {
|
|
||||||
pname = "nix-internal-api-docs";
|
|
||||||
inherit version;
|
|
||||||
|
|
||||||
src = nixSrc;
|
|
||||||
|
|
||||||
configureFlags = testConfigureFlags ++ internalApiDocsConfigureFlags;
|
|
||||||
|
|
||||||
nativeBuildInputs = nativeBuildDeps;
|
|
||||||
buildInputs = buildDeps ++ propagatedDeps
|
|
||||||
++ awsDeps ++ checkDeps ++ internalApiDocsDeps;
|
|
||||||
|
|
||||||
dontBuild = true;
|
|
||||||
|
|
||||||
installTargets = [ "internal-api-html" ];
|
|
||||||
|
|
||||||
postInstall = ''
|
|
||||||
mkdir -p $out/nix-support
|
|
||||||
echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> $out/nix-support/hydra-build-products
|
|
||||||
'';
|
|
||||||
};
|
};
|
||||||
|
|
||||||
# System tests.
|
# System tests.
|
||||||
|
@ -640,13 +296,18 @@
|
||||||
# Make sure that nix-env still produces the exact same result
|
# Make sure that nix-env still produces the exact same result
|
||||||
# on a particular version of Nixpkgs.
|
# on a particular version of Nixpkgs.
|
||||||
evalNixpkgs =
|
evalNixpkgs =
|
||||||
with nixpkgsFor.x86_64-linux.native;
|
let
|
||||||
|
inherit (nixpkgsFor.x86_64-linux.native) runCommand nix;
|
||||||
|
in
|
||||||
runCommand "eval-nixos" { buildInputs = [ nix ]; }
|
runCommand "eval-nixos" { buildInputs = [ nix ]; }
|
||||||
''
|
''
|
||||||
type -p nix-env
|
type -p nix-env
|
||||||
# Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593.
|
# Note: we're filtering out nixos-install-tools because https://github.com/NixOS/nixpkgs/pull/153594#issuecomment-1020530593.
|
||||||
|
(
|
||||||
|
set -x
|
||||||
time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages
|
time nix-env --store dummy:// -f ${nixpkgs-regression} -qaP --drv-path | sort | grep -v nixos-install-tools > packages
|
||||||
[[ $(sha1sum < packages | cut -c1-40) = ff451c521e61e4fe72bdbe2d0ca5d1809affa733 ]]
|
[[ $(sha1sum < packages | cut -c1-40) = e01b031fc9785a572a38be6bc473957e3b6faad7 ]]
|
||||||
|
)
|
||||||
mkdir $out
|
mkdir $out
|
||||||
'';
|
'';
|
||||||
|
|
||||||
|
@ -687,15 +348,24 @@
|
||||||
|
|
||||||
checks = forAllSystems (system: {
|
checks = forAllSystems (system: {
|
||||||
binaryTarball = self.hydraJobs.binaryTarball.${system};
|
binaryTarball = self.hydraJobs.binaryTarball.${system};
|
||||||
perlBindings = self.hydraJobs.perlBindings.${system};
|
|
||||||
installTests = self.hydraJobs.installTests.${system};
|
installTests = self.hydraJobs.installTests.${system};
|
||||||
nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
|
nixpkgsLibTests = self.hydraJobs.tests.nixpkgsLibTests.${system};
|
||||||
|
rl-next =
|
||||||
|
let pkgs = nixpkgsFor.${system}.native;
|
||||||
|
in pkgs.buildPackages.runCommand "test-rl-next-release-notes" { } ''
|
||||||
|
LANG=C.UTF-8 ${pkgs.changelog-d-nix}/bin/changelog-d ${./doc/manual/rl-next} >$out
|
||||||
|
'';
|
||||||
} // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
|
} // (lib.optionalAttrs (builtins.elem system linux64BitSystems)) {
|
||||||
dockerImage = self.hydraJobs.dockerImage.${system};
|
dockerImage = self.hydraJobs.dockerImage.${system};
|
||||||
|
} // (lib.optionalAttrs (!(builtins.elem system linux32BitSystems))) {
|
||||||
|
# Some perl dependencies are broken on i686-linux.
|
||||||
|
# Since the support is only best-effort there, disable the perl
|
||||||
|
# bindings
|
||||||
|
perlBindings = self.hydraJobs.perlBindings.${system};
|
||||||
});
|
});
|
||||||
|
|
||||||
packages = forAllSystems (system: rec {
|
packages = forAllSystems (system: rec {
|
||||||
inherit (nixpkgsFor.${system}.native) nix;
|
inherit (nixpkgsFor.${system}.native) nix changelog-d-nix;
|
||||||
default = nix;
|
default = nix;
|
||||||
} // (lib.optionalAttrs (builtins.elem system linux64BitSystems) {
|
} // (lib.optionalAttrs (builtins.elem system linux64BitSystems) {
|
||||||
nix-static = nixpkgsFor.${system}.static.nix;
|
nix-static = nixpkgsFor.${system}.static.nix;
|
||||||
|
@ -727,39 +397,9 @@
|
||||||
stdenvs)));
|
stdenvs)));
|
||||||
|
|
||||||
devShells = let
|
devShells = let
|
||||||
makeShell = pkgs: stdenv:
|
makeShell = pkgs: stdenv: (pkgs.nix.override { inherit stdenv; forDevShell = true; }).overrideAttrs (attrs: {
|
||||||
let
|
|
||||||
canRunInstalled = stdenv.buildPlatform.canExecute stdenv.hostPlatform;
|
|
||||||
in
|
|
||||||
with commonDeps { inherit pkgs; };
|
|
||||||
stdenv.mkDerivation {
|
|
||||||
name = "nix-super";
|
|
||||||
|
|
||||||
outputs = [ "out" "dev" "doc" ]
|
|
||||||
++ lib.optional (stdenv.hostPlatform != stdenv.buildPlatform) "check";
|
|
||||||
|
|
||||||
nativeBuildInputs = nativeBuildDeps
|
|
||||||
++ lib.optional stdenv.cc.isClang pkgs.buildPackages.bear
|
|
||||||
++ lib.optional
|
|
||||||
(stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform)
|
|
||||||
pkgs.buildPackages.clang-tools
|
|
||||||
# We want changelog-d in the shell even if the current build doesn't need it
|
|
||||||
++ lib.optional (officialRelease || ! buildUnreleasedNotes) changelog-d
|
|
||||||
;
|
|
||||||
|
|
||||||
buildInputs = buildDeps ++ propagatedDeps
|
|
||||||
++ awsDeps ++ checkDeps ++ internalApiDocsDeps;
|
|
||||||
|
|
||||||
configureFlags = configureFlags
|
|
||||||
++ testConfigureFlags ++ internalApiDocsConfigureFlags
|
|
||||||
++ lib.optional (!canRunInstalled) "--disable-doc-gen";
|
|
||||||
|
|
||||||
enableParallelBuilding = true;
|
|
||||||
|
|
||||||
installFlags = "sysconfdir=$(out)/etc";
|
installFlags = "sysconfdir=$(out)/etc";
|
||||||
|
shellHook = ''
|
||||||
shellHook =
|
|
||||||
''
|
|
||||||
PATH=$prefix/bin:$PATH
|
PATH=$prefix/bin:$PATH
|
||||||
unset PYTHONPATH
|
unset PYTHONPATH
|
||||||
export MANPATH=$out/share/man:$MANPATH
|
export MANPATH=$out/share/man:$MANPATH
|
||||||
|
@ -767,7 +407,13 @@
|
||||||
# Make bash completion work.
|
# Make bash completion work.
|
||||||
XDG_DATA_DIRS+=:$out/share
|
XDG_DATA_DIRS+=:$out/share
|
||||||
'';
|
'';
|
||||||
};
|
|
||||||
|
nativeBuildInputs = attrs.nativeBuildInputs or []
|
||||||
|
# TODO: Remove the darwin check once
|
||||||
|
# https://github.com/NixOS/nixpkgs/pull/291814 is available
|
||||||
|
++ lib.optional (stdenv.cc.isClang && !stdenv.buildPlatform.isDarwin) pkgs.buildPackages.bear
|
||||||
|
++ lib.optional (stdenv.cc.isClang && stdenv.hostPlatform == stdenv.buildPlatform) pkgs.buildPackages.clang-tools;
|
||||||
|
});
|
||||||
in
|
in
|
||||||
forAllSystems (system:
|
forAllSystems (system:
|
||||||
let
|
let
|
||||||
|
@ -777,8 +423,9 @@
|
||||||
(forAllStdenvs (stdenvName: makeShell pkgs pkgs.${stdenvName}));
|
(forAllStdenvs (stdenvName: makeShell pkgs pkgs.${stdenvName}));
|
||||||
in
|
in
|
||||||
(makeShells "native" nixpkgsFor.${system}.native) //
|
(makeShells "native" nixpkgsFor.${system}.native) //
|
||||||
(makeShells "static" nixpkgsFor.${system}.static) //
|
(lib.optionalAttrs (!nixpkgsFor.${system}.native.stdenv.isDarwin)
|
||||||
(forAllCrossSystems (crossSystem: let pkgs = nixpkgsFor.${system}.cross.${crossSystem}; in makeShell pkgs pkgs.stdenv)) //
|
(makeShells "static" nixpkgsFor.${system}.static)) //
|
||||||
|
(lib.genAttrs shellCrossSystems (crossSystem: let pkgs = nixpkgsFor.${system}.cross.${crossSystem}; in makeShell pkgs pkgs.stdenv)) //
|
||||||
{
|
{
|
||||||
default = self.devShells.${system}.native-stdenvPackages;
|
default = self.devShells.${system}.native-stdenvPackages;
|
||||||
}
|
}
|
||||||
|
|
|
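The flake hunks above replace most ad-hoc `overrideAttrs` calls with arguments on the new `package.nix`. Since the split view is hard to follow, here is a condensed sketch of that pattern, using only names that appear in this diff (`self`, `forAllSystems`, and the `enableGC`, `doCheck`, `doInstallCheck`, `installUnitTests`, `enableMarkdown`, `readlineFlavor` arguments); it is a reading aid, not additional code:

```nix
# Condensed from the hydraJobs hunks above: CI variants are now expressed as
# combinations of package.nix options via .override, instead of overrideAttrs.
{
  buildNoGc = forAllSystems (system:
    self.packages.${system}.nix.override { enableGC = false; });

  buildNoTests = forAllSystems (system:
    self.packages.${system}.nix.override {
      doCheck = false;
      doInstallCheck = false;
      installUnitTests = false;
    });

  # Windows needs this library combination, and Debian builds Nix with
  # GNU readline too, so this variant improves coverage.
  buildReadlineNoMarkdown = forAllSystems (system:
    self.packages.${system}.nix.override {
      enableMarkdown = false;
      readlineFlavor = "readline";
    });
}
```

The same mechanism drives the dev shells: `makeShell` now overrides `pkgs.nix` with `forDevShell = true;` and layers shell-only tooling on top with `overrideAttrs`.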
@ -43,7 +43,11 @@ The team meets twice a week:
|
||||||
- Discussion meeting: [Fridays 13:00-14:00 CET](https://calendar.google.com/calendar/event?eid=MHNtOGVuNWtrZXNpZHR2bW1sM3QyN2ZjaGNfMjAyMjExMjVUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)
|
- Discussion meeting: [Fridays 13:00-14:00 CET](https://calendar.google.com/calendar/event?eid=MHNtOGVuNWtrZXNpZHR2bW1sM3QyN2ZjaGNfMjAyMjExMjVUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)
|
||||||
|
|
||||||
1. Triage issues and pull requests from the [No Status](#no-status) column (30 min)
|
1. Triage issues and pull requests from the [No Status](#no-status) column (30 min)
|
||||||
2. Discuss issues and pull requests from the [To discuss](#to-discuss) column (30 min)
|
2. Discuss issues and pull requests from the [To discuss](#to-discuss) column (30 min).
|
||||||
|
Once a month, each team member checks the [Assigned](#assigned) column for PRs and issues assigned to them, to either
|
||||||
|
- unblock it by providing input
|
||||||
|
- mark it as draft if it is blocked on the contributor
|
||||||
|
- escalate it back to the team by moving it to To discuss, and leaving a comment as to why the issue needs to be discussed again.
|
||||||
|
|
||||||
- Work meeting: [Mondays 13:00-15:00 CET](https://calendar.google.com/calendar/event?eid=NTM1MG1wNGJnOGpmOTZhYms3bTB1bnY5cWxfMjAyMjExMjFUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)
|
- Work meeting: [Mondays 13:00-15:00 CET](https://calendar.google.com/calendar/event?eid=NTM1MG1wNGJnOGpmOTZhYms3bTB1bnY5cWxfMjAyMjExMjFUMTIwMDAwWiBiOW81MmZvYnFqYWs4b3E4bGZraGczdDBxZ0Bn)
|
||||||
|
|
||||||
|
|
|
@ -1,7 +1,5 @@
|
||||||
#!/usr/bin/env nix-shell
|
#!/usr/bin/env nix
|
||||||
#!nix-shell -i bash ../shell.nix -I nixpkgs=channel:nixos-unstable-small
|
#!nix shell .#changelog-d-nix --command bash
|
||||||
# ^^^^^^^
|
|
||||||
# Only used for bash. shell.nix goes to the flake.
|
|
||||||
|
|
||||||
# --- CONFIGURATION ---
|
# --- CONFIGURATION ---
|
||||||
|
|
||||||
|
|
|
@ -27,8 +27,9 @@ release:
|
||||||
* Compile the release notes by running
|
* Compile the release notes by running
|
||||||
|
|
||||||
```console
|
```console
|
||||||
|
$ export VERSION=X.YY
|
||||||
$ git checkout -b release-notes
|
$ git checkout -b release-notes
|
||||||
$ VERSION=X.YY ./maintainers/release-notes
|
$ ./maintainers/release-notes
|
||||||
```
|
```
|
||||||
|
|
||||||
where `X.YY` is *without* the patch level, e.g. `2.12` rather than ~~`2.12.0`~~.
|
where `X.YY` is *without* the patch level, e.g. `2.12` rather than ~~`2.12.0`~~.
|
||||||
|
|
|
@ -11,6 +11,8 @@ use JSON::PP;
|
||||||
use LWP::UserAgent;
|
use LWP::UserAgent;
|
||||||
use Net::Amazon::S3;
|
use Net::Amazon::S3;
|
||||||
|
|
||||||
|
delete $ENV{'shell'}; # shut up a LWP::UserAgent.pm warning
|
||||||
|
|
||||||
my $evalId = $ARGV[0] or die "Usage: $0 EVAL-ID\n";
|
my $evalId = $ARGV[0] or die "Usage: $0 EVAL-ID\n";
|
||||||
|
|
||||||
my $releasesBucketName = "nix-releases";
|
my $releasesBucketName = "nix-releases";
|
||||||
|
@ -36,9 +38,9 @@ sub fetch {
|
||||||
my $evalUrl = "https://hydra.nixos.org/eval/$evalId";
|
my $evalUrl = "https://hydra.nixos.org/eval/$evalId";
|
||||||
my $evalInfo = decode_json(fetch($evalUrl, 'application/json'));
|
my $evalInfo = decode_json(fetch($evalUrl, 'application/json'));
|
||||||
#print Dumper($evalInfo);
|
#print Dumper($evalInfo);
|
||||||
my $flakeUrl = $evalInfo->{flake} or die;
|
my $flakeUrl = $evalInfo->{flake};
|
||||||
my $flakeInfo = decode_json(`nix flake metadata --json "$flakeUrl"` or die);
|
my $flakeInfo = decode_json(`nix flake metadata --json "$flakeUrl"` or die) if $flakeUrl;
|
||||||
my $nixRev = $flakeInfo->{revision} or die;
|
my $nixRev = ($flakeInfo ? $flakeInfo->{revision} : $evalInfo->{jobsetevalinputs}->{nix}->{revision}) or die;
|
||||||
|
|
||||||
my $buildInfo = decode_json(fetch("$evalUrl/job/build.x86_64-linux", 'application/json'));
|
my $buildInfo = decode_json(fetch("$evalUrl/job/build.x86_64-linux", 'application/json'));
|
||||||
#print Dumper($buildInfo);
|
#print Dumper($buildInfo);
|
||||||
|
@ -83,12 +85,19 @@ my $channelsBucket = $s3_us->bucket($channelsBucketName) or die;
|
||||||
sub getStorePath {
|
sub getStorePath {
|
||||||
my ($jobName, $output) = @_;
|
my ($jobName, $output) = @_;
|
||||||
my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));
|
my $buildInfo = decode_json(fetch("$evalUrl/job/$jobName", 'application/json'));
|
||||||
return $buildInfo->{buildoutputs}->{$output or "out"}->{path} or die "cannot get store path for '$jobName'";
|
return $buildInfo->{buildoutputs}->{$output or "out"}->{path} // die "cannot get store path for '$jobName'";
|
||||||
}
|
}
|
||||||
|
|
||||||
sub copyManual {
|
sub copyManual {
|
||||||
my $manual = getStorePath("build.x86_64-linux", "doc");
|
my $manual;
|
||||||
print "$manual\n";
|
eval {
|
||||||
|
$manual = getStorePath("build.x86_64-linux", "doc");
|
||||||
|
};
|
||||||
|
if ($@) {
|
||||||
|
warn "$@";
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
print "Manual: $manual\n";
|
||||||
|
|
||||||
my $manualNar = "$tmpDir/$releaseName-manual.nar.xz";
|
my $manualNar = "$tmpDir/$releaseName-manual.nar.xz";
|
||||||
print "$manualNar\n";
|
print "$manualNar\n";
|
||||||
|
@ -154,19 +163,33 @@ downloadFile("binaryTarball.x86_64-linux", "1");
|
||||||
downloadFile("binaryTarball.aarch64-linux", "1");
|
downloadFile("binaryTarball.aarch64-linux", "1");
|
||||||
downloadFile("binaryTarball.x86_64-darwin", "1");
|
downloadFile("binaryTarball.x86_64-darwin", "1");
|
||||||
downloadFile("binaryTarball.aarch64-darwin", "1");
|
downloadFile("binaryTarball.aarch64-darwin", "1");
|
||||||
downloadFile("binaryTarballCross.x86_64-linux.armv6l-linux", "1");
|
eval {
|
||||||
downloadFile("binaryTarballCross.x86_64-linux.armv7l-linux", "1");
|
downloadFile("binaryTarballCross.x86_64-linux.armv6l-unknown-linux-gnueabihf", "1");
|
||||||
|
};
|
||||||
|
warn "$@" if $@;
|
||||||
|
eval {
|
||||||
|
downloadFile("binaryTarballCross.x86_64-linux.armv7l-unknown-linux-gnueabihf", "1");
|
||||||
|
};
|
||||||
|
warn "$@" if $@;
|
||||||
downloadFile("installerScript", "1");
|
downloadFile("installerScript", "1");
|
||||||
|
|
||||||
# Upload docker images to dockerhub.
|
# Upload docker images to dockerhub.
|
||||||
my $dockerManifest = "";
|
my $dockerManifest = "";
|
||||||
my $dockerManifestLatest = "";
|
my $dockerManifestLatest = "";
|
||||||
|
my $haveDocker = 0;
|
||||||
|
|
||||||
for my $platforms (["x86_64-linux", "amd64"], ["aarch64-linux", "arm64"]) {
|
for my $platforms (["x86_64-linux", "amd64"], ["aarch64-linux", "arm64"]) {
|
||||||
my $system = $platforms->[0];
|
my $system = $platforms->[0];
|
||||||
my $dockerPlatform = $platforms->[1];
|
my $dockerPlatform = $platforms->[1];
|
||||||
my $fn = "nix-$version-docker-image-$dockerPlatform.tar.gz";
|
my $fn = "nix-$version-docker-image-$dockerPlatform.tar.gz";
|
||||||
|
eval {
|
||||||
downloadFile("dockerImage.$system", "1", $fn);
|
downloadFile("dockerImage.$system", "1", $fn);
|
||||||
|
};
|
||||||
|
if ($@) {
|
||||||
|
warn "$@" if $@;
|
||||||
|
next;
|
||||||
|
}
|
||||||
|
$haveDocker = 1;
|
||||||
|
|
||||||
print STDERR "loading docker image for $dockerPlatform...\n";
|
print STDERR "loading docker image for $dockerPlatform...\n";
|
||||||
system("docker load -i $tmpDir/$fn") == 0 or die;
|
system("docker load -i $tmpDir/$fn") == 0 or die;
|
||||||
|
@ -194,21 +217,23 @@ for my $platforms (["x86_64-linux", "amd64"], ["aarch64-linux", "arm64"]) {
|
||||||
$dockerManifestLatest .= " --amend $latestTag"
|
$dockerManifestLatest .= " --amend $latestTag"
|
||||||
}
|
}
|
||||||
|
|
||||||
print STDERR "creating multi-platform docker manifest...\n";
|
if ($haveDocker) {
|
||||||
system("docker manifest rm nixos/nix:$version");
|
print STDERR "creating multi-platform docker manifest...\n";
|
||||||
system("docker manifest create nixos/nix:$version $dockerManifest") == 0 or die;
|
system("docker manifest rm nixos/nix:$version");
|
||||||
if ($isLatest) {
|
system("docker manifest create nixos/nix:$version $dockerManifest") == 0 or die;
|
||||||
|
if ($isLatest) {
|
||||||
print STDERR "creating latest multi-platform docker manifest...\n";
|
print STDERR "creating latest multi-platform docker manifest...\n";
|
||||||
system("docker manifest rm nixos/nix:latest");
|
system("docker manifest rm nixos/nix:latest");
|
||||||
system("docker manifest create nixos/nix:latest $dockerManifestLatest") == 0 or die;
|
system("docker manifest create nixos/nix:latest $dockerManifestLatest") == 0 or die;
|
||||||
}
|
}
|
||||||
|
|
||||||
print STDERR "pushing multi-platform docker manifest...\n";
|
print STDERR "pushing multi-platform docker manifest...\n";
|
||||||
system("docker manifest push nixos/nix:$version") == 0 or die;
|
system("docker manifest push nixos/nix:$version") == 0 or die;
|
||||||
|
|
||||||
if ($isLatest) {
|
if ($isLatest) {
|
||||||
print STDERR "pushing latest multi-platform docker manifest...\n";
|
print STDERR "pushing latest multi-platform docker manifest...\n";
|
||||||
system("docker manifest push nixos/nix:latest") == 0 or die;
|
system("docker manifest push nixos/nix:latest") == 0 or die;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
# Upload nix-fallback-paths.nix.
|
# Upload nix-fallback-paths.nix.
|
||||||
|
|
31
misc/changelog-d.cabal.nix
Normal file
|
@ -0,0 +1,31 @@
|
||||||
|
{ mkDerivation, aeson, base, bytestring, cabal-install-parsers
|
||||||
|
, Cabal-syntax, containers, directory, filepath, frontmatter
|
||||||
|
, generic-lens-lite, lib, mtl, optparse-applicative, parsec, pretty
|
||||||
|
, regex-applicative, text, pkgs
|
||||||
|
}:
|
||||||
|
let rev = "f30f6969e9cd8b56242309639d58acea21c99d06";
|
||||||
|
in
|
||||||
|
mkDerivation {
|
||||||
|
pname = "changelog-d";
|
||||||
|
version = "0.1";
|
||||||
|
src = pkgs.fetchurl {
|
||||||
|
name = "changelog-d-${rev}.tar.gz";
|
||||||
|
url = "https://codeberg.org/roberth/changelog-d/archive/${rev}.tar.gz";
|
||||||
|
hash = "sha256-8a2+i5u7YoszAgd5OIEW0eYUcP8yfhtoOIhLJkylYJ4=";
|
||||||
|
} // { inherit rev; };
|
||||||
|
isLibrary = false;
|
||||||
|
isExecutable = true;
|
||||||
|
libraryHaskellDepends = [
|
||||||
|
aeson base bytestring cabal-install-parsers Cabal-syntax containers
|
||||||
|
directory filepath frontmatter generic-lens-lite mtl parsec pretty
|
||||||
|
regex-applicative text
|
||||||
|
];
|
||||||
|
executableHaskellDepends = [
|
||||||
|
base bytestring Cabal-syntax directory filepath
|
||||||
|
optparse-applicative
|
||||||
|
];
|
||||||
|
doHaddock = false;
|
||||||
|
description = "Concatenate changelog entries into a single one";
|
||||||
|
license = lib.licenses.gpl3Plus;
|
||||||
|
mainProgram = "changelog-d";
|
||||||
|
}
|
31
misc/changelog-d.nix
Normal file
|
@ -0,0 +1,31 @@
|
||||||
|
# Taken temporarily from <nixpkgs/pkgs/by-name/ch/changelog-d/package.nix>
|
||||||
|
{
|
||||||
|
callPackage,
|
||||||
|
lib,
|
||||||
|
haskell,
|
||||||
|
haskellPackages,
|
||||||
|
}:
|
||||||
|
|
||||||
|
let
|
||||||
|
hsPkg = haskellPackages.callPackage ./changelog-d.cabal.nix { };
|
||||||
|
|
||||||
|
addCompletions = haskellPackages.generateOptparseApplicativeCompletions ["changelog-d"];
|
||||||
|
|
||||||
|
haskellModifications =
|
||||||
|
lib.flip lib.pipe [
|
||||||
|
addCompletions
|
||||||
|
haskell.lib.justStaticExecutables
|
||||||
|
];
|
||||||
|
|
||||||
|
mkDerivationOverrides = finalAttrs: oldAttrs: {
|
||||||
|
|
||||||
|
version = oldAttrs.version + "-git-${lib.strings.substring 0 7 oldAttrs.src.rev}";
|
||||||
|
|
||||||
|
meta = oldAttrs.meta // {
|
||||||
|
homepage = "https://codeberg.org/roberth/changelog-d";
|
||||||
|
maintainers = [ lib.maintainers.roberth ];
|
||||||
|
};
|
||||||
|
|
||||||
|
};
|
||||||
|
in
|
||||||
|
(haskellModifications hsPkg).overrideAttrs mkDerivationOverrides
|
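For orientation, the overlay and `checks` hunks in flake.nix above consume this package roughly as follows; this is a condensed sketch of those hunks (`final`, `nixpkgsFor`, and `system` are the flake's own bindings), not new behaviour:

```nix
# Condensed from flake.nix above: expose changelog-d and use it to validate
# the unreleased release-note entries under doc/manual/rl-next.
{
  # In the overlay:
  changelog-d-nix = final.buildPackages.callPackage ./misc/changelog-d.nix { };

  # In the per-system checks:
  rl-next =
    let pkgs = nixpkgsFor.${system}.native;
    in pkgs.buildPackages.runCommand "test-rl-next-release-notes" { } ''
      LANG=C.UTF-8 ${pkgs.changelog-d-nix}/bin/changelog-d ${./doc/manual/rl-next} >$out
    '';
}
```

The maintainers' release-notes script picks up the same package through `nix shell .#changelog-d-nix`, as its new shebang earlier in this diff shows.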
11
mk/compilation-database.mk
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
compile-commands-json-files :=
|
||||||
|
|
||||||
|
define write-compile-commands
|
||||||
|
_srcs := $$(sort $$(foreach src, $$($(1)_SOURCES), $$(src)))
|
||||||
|
|
||||||
|
$(1)_COMPILE_COMMANDS_JSON := $$(addprefix $(buildprefix), $$(addsuffix .compile_commands.json, $$(basename $$(_srcs))))
|
||||||
|
|
||||||
|
compile-commands-json-files += $$($(1)_COMPILE_COMMANDS_JSON)
|
||||||
|
|
||||||
|
clean-files += $$($(1)_COMPILE_COMMANDS_JSON)
|
||||||
|
endef
|
|
@ -1,5 +1,5 @@
|
||||||
%.gen.hh: %
|
%.gen.hh: %
|
||||||
@echo 'R"foo(' >> $@.tmp
|
@echo 'R"__NIX_STR(' >> $@.tmp
|
||||||
$(trace-gen) cat $< >> $@.tmp
|
$(trace-gen) cat $< >> $@.tmp
|
||||||
@echo ')foo"' >> $@.tmp
|
@echo ')__NIX_STR"' >> $@.tmp
|
||||||
@mv $@.tmp $@
|
@mv $@.tmp $@
|
||||||
|
|
|
@ -1,12 +0,0 @@
|
||||||
# This file is only active for `./configure --disable-tests`.
|
|
||||||
# Running `make check` or `make installcheck` would indicate a mistake in the
|
|
||||||
# caller.
|
|
||||||
|
|
||||||
installcheck:
|
|
||||||
@echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make installcheck'."
|
|
||||||
@exit 1
|
|
||||||
|
|
||||||
# This currently has little effect.
|
|
||||||
check:
|
|
||||||
@echo "Tests are disabled. Configure without '--disable-tests', or avoid calling 'make check'."
|
|
||||||
@exit 1
|
|
31
mk/lib.mk
|
@ -12,24 +12,7 @@ man-pages :=
|
||||||
install-tests :=
|
install-tests :=
|
||||||
install-tests-groups :=
|
install-tests-groups :=
|
||||||
|
|
||||||
ifdef HOST_OS
|
include mk/platform.mk
|
||||||
HOST_KERNEL = $(firstword $(subst -, ,$(HOST_OS)))
|
|
||||||
ifeq ($(HOST_KERNEL), cygwin)
|
|
||||||
HOST_CYGWIN = 1
|
|
||||||
endif
|
|
||||||
ifeq ($(patsubst darwin%,,$(HOST_KERNEL)),)
|
|
||||||
HOST_DARWIN = 1
|
|
||||||
endif
|
|
||||||
ifeq ($(patsubst freebsd%,,$(HOST_KERNEL)),)
|
|
||||||
HOST_FREEBSD = 1
|
|
||||||
endif
|
|
||||||
ifeq ($(HOST_KERNEL), linux)
|
|
||||||
HOST_LINUX = 1
|
|
||||||
endif
|
|
||||||
ifeq ($(patsubst solaris%,,$(HOST_KERNEL)),)
|
|
||||||
HOST_SOLARIS = 1
|
|
||||||
endif
|
|
||||||
endif
|
|
||||||
|
|
||||||
# Hack to define a literal space.
|
# Hack to define a literal space.
|
||||||
space :=
|
space :=
|
||||||
|
@ -85,6 +68,7 @@ include mk/patterns.mk
|
||||||
include mk/templates.mk
|
include mk/templates.mk
|
||||||
include mk/cxx-big-literal.mk
|
include mk/cxx-big-literal.mk
|
||||||
include mk/tests.mk
|
include mk/tests.mk
|
||||||
|
include mk/compilation-database.mk
|
||||||
|
|
||||||
|
|
||||||
# Include all sub-Makefiles.
|
# Include all sub-Makefiles.
|
||||||
|
@ -114,6 +98,17 @@ $(foreach test-group, $(install-tests-groups), \
|
||||||
$(eval $(call run-test,$(test),$(install_test_init))) \
|
$(eval $(call run-test,$(test),$(install_test_init))) \
|
||||||
$(eval $(test-group).test-group: $(test).test)))
|
$(eval $(test-group).test-group: $(test).test)))
|
||||||
|
|
||||||
|
# Compilation database.
|
||||||
|
$(foreach lib, $(libraries), $(eval $(call write-compile-commands,$(lib))))
|
||||||
|
$(foreach prog, $(programs), $(eval $(call write-compile-commands,$(prog))))
|
||||||
|
|
||||||
|
compile_commands.json: $(compile-commands-json-files)
|
||||||
|
@jq --slurp '.' $^ >$@
|
||||||
|
|
||||||
|
# Include makefiles requiring built programs.
|
||||||
|
$(foreach mf, $(makefiles-late), $(eval $(call include-sub-makefile,$(mf))))
|
||||||
|
|
||||||
|
|
||||||
$(foreach file, $(man-pages), $(eval $(call install-data-in, $(file), $(mandir)/man$(patsubst .%,%,$(suffix $(file))))))
|
$(foreach file, $(man-pages), $(eval $(call install-data-in, $(file), $(mandir)/man$(patsubst .%,%,$(suffix $(file))))))
|
||||||
|
|
||||||
|
|
||||||
|
|
|
@ -3,13 +3,19 @@ libs-list :=
|
||||||
ifdef HOST_DARWIN
|
ifdef HOST_DARWIN
|
||||||
SO_EXT = dylib
|
SO_EXT = dylib
|
||||||
else
|
else
|
||||||
ifdef HOST_CYGWIN
|
ifdef HOST_WINDOWS
|
||||||
SO_EXT = dll
|
SO_EXT = dll
|
||||||
else
|
else
|
||||||
SO_EXT = so
|
SO_EXT = so
|
||||||
endif
|
endif
|
||||||
endif
|
endif
|
||||||
|
|
||||||
|
ifdef HOST_UNIX
|
||||||
|
THREAD_LDFLAGS = -pthread
|
||||||
|
else
|
||||||
|
THREAD_LDFLAGS =
|
||||||
|
endif
|
||||||
|
|
||||||
# Build a library with symbolic name $(1). The library is defined by
|
# Build a library with symbolic name $(1). The library is defined by
|
||||||
# various variables prefixed by ‘$(1)_’:
|
# various variables prefixed by ‘$(1)_’:
|
||||||
#
|
#
|
||||||
|
@ -59,7 +65,7 @@ define build-library
|
||||||
$(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs))))
|
$(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs))))
|
||||||
_libs := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_PATH))
|
_libs := $$(foreach lib, $$($(1)_LIBS), $$($$(lib)_PATH))
|
||||||
|
|
||||||
ifdef HOST_CYGWIN
|
ifdef HOST_WINDOWS
|
||||||
$(1)_INSTALL_DIR ?= $$(bindir)
|
$(1)_INSTALL_DIR ?= $$(bindir)
|
||||||
else
|
else
|
||||||
$(1)_INSTALL_DIR ?= $$(libdir)
|
$(1)_INSTALL_DIR ?= $$(libdir)
|
||||||
|
@ -79,7 +85,7 @@ define build-library
|
||||||
endif
|
endif
|
||||||
else
|
else
|
||||||
ifndef HOST_DARWIN
|
ifndef HOST_DARWIN
|
||||||
ifndef HOST_CYGWIN
|
ifndef HOST_WINDOWS
|
||||||
$(1)_LDFLAGS += -Wl,-z,defs
|
$(1)_LDFLAGS += -Wl,-z,defs
|
||||||
endif
|
endif
|
||||||
endif
|
endif
|
||||||
|
|
|
@ -1,11 +1,41 @@
|
||||||
|
|
||||||
|
# These are the complete command lines we use to compile C and C++ files.
|
||||||
|
# - $< is the source file.
|
||||||
|
# - $1 is the object file to create.
|
||||||
|
CC_CMD=$(CC) -o $1 -c $< $(CPPFLAGS) $(GLOBAL_CFLAGS) $(CFLAGS) $($1_CFLAGS) -MMD -MF $(call filename-to-dep,$1) -MP
|
||||||
|
CXX_CMD=$(CXX) -o $1 -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($1_CXXFLAGS) $(ERROR_SWITCH_ENUM) -MMD -MF $(call filename-to-dep,$1) -MP
|
||||||
|
|
||||||
|
# We use COMPILE_COMMANDS_JSON_CMD to turn a compilation command (like CC_CMD
|
||||||
|
# or CXX_CMD above) into a compile_commands.json file. We rely on bash's native
|
||||||
|
# word splitting to define the positional arguments.
|
||||||
|
# - $< is the source file being compiled.
|
||||||
|
COMPILE_COMMANDS_JSON_CMD=jq --null-input '{ directory: $$ENV.PWD, file: "$<", arguments: $$ARGS.positional }' --args --
|
||||||
|
|
||||||
|
|
||||||
$(buildprefix)%.o: %.cc
|
$(buildprefix)%.o: %.cc
|
||||||
@mkdir -p "$(dir $@)"
|
@mkdir -p "$(dir $@)"
|
||||||
$(trace-cxx) $(CXX) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) $(ERROR_SWITCH_ENUM) -MMD -MF $(call filename-to-dep, $@) -MP
|
$(trace-cxx) $(call CXX_CMD,$@)
|
||||||
|
|
||||||
$(buildprefix)%.o: %.cpp
|
$(buildprefix)%.o: %.cpp
|
||||||
@mkdir -p "$(dir $@)"
|
@mkdir -p "$(dir $@)"
|
||||||
$(trace-cxx) $(CXX) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CXXFLAGS_PCH) $(GLOBAL_CXXFLAGS) $(CXXFLAGS) $($@_CXXFLAGS) $(ERROR_SWITCH_ENUM) -MMD -MF $(call filename-to-dep, $@) -MP
|
$(trace-cxx) $(call CXX_CMD,$@)
|
||||||
|
|
||||||
$(buildprefix)%.o: %.c
|
$(buildprefix)%.o: %.c
|
||||||
@mkdir -p "$(dir $@)"
|
@mkdir -p "$(dir $@)"
|
||||||
$(trace-cc) $(CC) -o $@ -c $< $(CPPFLAGS) $(GLOBAL_CFLAGS) $(CFLAGS) $($@_CFLAGS) -MMD -MF $(call filename-to-dep, $@) -MP
|
$(trace-cc) $(call CC_CMD,$@)
|
||||||
|
|
||||||
|
# In the following we need to replace the .compile_commands.json extension in $@ with .o
|
||||||
|
# to make the object file. This is needed because CC_CMD and CXX_CMD do further expansions
|
||||||
|
# based on the object file name (i.e. *_CXXFLAGS and filename-to-dep).
|
||||||
|
|
||||||
|
$(buildprefix)%.compile_commands.json: %.cc
|
||||||
|
@mkdir -p "$(dir $@)"
|
||||||
|
$(trace-jq) $(COMPILE_COMMANDS_JSON_CMD) $(call CXX_CMD,$(@:.compile_commands.json=.o)) > $@
|
||||||
|
|
||||||
|
$(buildprefix)%.compile_commands.json: %.cpp
|
||||||
|
@mkdir -p "$(dir $@)"
|
||||||
|
$(trace-jq) $(COMPILE_COMMANDS_JSON_CMD) $(call CXX_CMD,$(@:.compile_commands.json=.o)) > $@
|
||||||
|
|
||||||
|
$(buildprefix)%.compile_commands.json: %.c
|
||||||
|
@mkdir -p "$(dir $@)"
|
||||||
|
$(trace-jq) $(COMPILE_COMMANDS_JSON_CMD) $(call CC_CMD,$(@:.compile_commands.json=.o)) > $@
|
||||||
|
|
32
mk/platform.mk
Normal file
|
@ -0,0 +1,32 @@
|
||||||
|
ifdef HOST_OS
|
||||||
|
HOST_KERNEL = $(firstword $(subst -, ,$(HOST_OS)))
|
||||||
|
ifeq ($(patsubst mingw%,,$(HOST_KERNEL)),)
|
||||||
|
HOST_MINGW = 1
|
||||||
|
HOST_WINDOWS = 1
|
||||||
|
endif
|
||||||
|
ifeq ($(HOST_KERNEL), cygwin)
|
||||||
|
HOST_CYGWIN = 1
|
||||||
|
HOST_WINDOWS = 1
|
||||||
|
HOST_UNIX = 1
|
||||||
|
endif
|
||||||
|
ifeq ($(patsubst darwin%,,$(HOST_KERNEL)),)
|
||||||
|
HOST_DARWIN = 1
|
||||||
|
HOST_UNIX = 1
|
||||||
|
endif
|
||||||
|
ifeq ($(patsubst freebsd%,,$(HOST_KERNEL)),)
|
||||||
|
HOST_FREEBSD = 1
|
||||||
|
HOST_UNIX = 1
|
||||||
|
endif
|
||||||
|
ifeq ($(patsubst netbsd%,,$(HOST_KERNEL)),)
|
||||||
|
HOST_NETBSD = 1
|
||||||
|
HOST_UNIX = 1
|
||||||
|
endif
|
||||||
|
ifeq ($(HOST_KERNEL), linux)
|
||||||
|
HOST_LINUX = 1
|
||||||
|
HOST_UNIX = 1
|
||||||
|
endif
|
||||||
|
ifeq ($(patsubst solaris%,,$(HOST_KERNEL)),)
|
||||||
|
HOST_SOLARIS = 1
|
||||||
|
HOST_UNIX = 1
|
||||||
|
endif
|
||||||
|
endif
|
|
@ -1,5 +1,11 @@
|
||||||
programs-list :=
|
programs-list :=
|
||||||
|
|
||||||
|
ifdef HOST_WINDOWS
|
||||||
|
EXE_EXT = .exe
|
||||||
|
else
|
||||||
|
EXE_EXT =
|
||||||
|
endif
|
||||||
|
|
||||||
# Build a program with symbolic name $(1). The program is defined by
|
# Build a program with symbolic name $(1). The program is defined by
|
||||||
# various variables prefixed by ‘$(1)_’:
|
# various variables prefixed by ‘$(1)_’:
|
||||||
#
|
#
|
||||||
|
@ -31,7 +37,7 @@ define build-program
|
||||||
_srcs := $$(sort $$(foreach src, $$($(1)_SOURCES), $$(src)))
|
_srcs := $$(sort $$(foreach src, $$($(1)_SOURCES), $$(src)))
|
||||||
$(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs))))
|
$(1)_OBJS := $$(addprefix $(buildprefix), $$(addsuffix .o, $$(basename $$(_srcs))))
|
||||||
_libs := $$(foreach lib, $$($(1)_LIBS), $$(foreach lib2, $$($$(lib)_LIB_CLOSURE), $$($$(lib2)_PATH)))
|
_libs := $$(foreach lib, $$($(1)_LIBS), $$(foreach lib2, $$($$(lib)_LIB_CLOSURE), $$($$(lib2)_PATH)))
|
||||||
$(1)_PATH := $$(_d)/$$($(1)_NAME)
|
$(1)_PATH := $$(_d)/$$($(1)_NAME)$(EXE_EXT)
|
||||||
|
|
||||||
$$(eval $$(call create-dir, $$(_d)))
|
$$(eval $$(call create-dir, $$(_d)))
|
||||||
|
|
||||||
|
@ -42,7 +48,7 @@ define build-program
|
||||||
|
|
||||||
ifdef $(1)_INSTALL_DIR
|
ifdef $(1)_INSTALL_DIR
|
||||||
|
|
||||||
$(1)_INSTALL_PATH := $$($(1)_INSTALL_DIR)/$$($(1)_NAME)
|
$(1)_INSTALL_PATH := $$($(1)_INSTALL_DIR)/$$($(1)_NAME)$(EXE_EXT)
|
||||||
|
|
||||||
$$(eval $$(call create-dir, $$($(1)_INSTALL_DIR)))
|
$$(eval $$(call create-dir, $$($(1)_INSTALL_DIR)))
|
||||||
|
|
||||||
|
|
|
@ -10,10 +10,10 @@ endef
|
||||||
|
|
||||||
ifneq ($(MAKECMDGOALS), clean)
|
ifneq ($(MAKECMDGOALS), clean)
|
||||||
|
|
||||||
$(buildprefix)%.h: %.h.in
|
$(buildprefix)%.h: %.h.in $(buildprefix)config.status
|
||||||
$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --header=$(@:$(buildprefix)%=%)
|
$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --header=$(@:$(buildprefix)%=%)
|
||||||
|
|
||||||
$(buildprefix)%: %.in
|
$(buildprefix)%: %.in $(buildprefix)config.status
|
||||||
$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --file=$(@:$(buildprefix)%=%)
|
$(trace-gen) rm -f $@ && cd $(buildprefixrel) && ./config.status --quiet --file=$(@:$(buildprefix)%=%)
|
||||||
|
|
||||||
endif
|
endif
|
||||||
|
|
|
@ -10,6 +10,8 @@ ifeq ($(V), 0)
|
||||||
trace-install = @echo " INST " $@;
|
trace-install = @echo " INST " $@;
|
||||||
trace-mkdir = @echo " MKDIR " $@;
|
trace-mkdir = @echo " MKDIR " $@;
|
||||||
trace-test = @echo " TEST " $@;
|
trace-test = @echo " TEST " $@;
|
||||||
|
trace-sh = @echo " SH " $@;
|
||||||
|
trace-jq = @echo " JQ " $@;
|
||||||
|
|
||||||
suppress = @
|
suppress = @
|
||||||
|
|
||||||
|
|
397
package.nix
Normal file
|
@ -0,0 +1,397 @@
|
||||||
|
{ lib
|
||||||
|
, stdenv
|
||||||
|
, releaseTools
|
||||||
|
, autoconf-archive
|
||||||
|
, autoreconfHook
|
||||||
|
, aws-sdk-cpp
|
||||||
|
, boehmgc
|
||||||
|
, nlohmann_json
|
||||||
|
, bison
|
||||||
|
, boost
|
||||||
|
, brotli
|
||||||
|
, bzip2
|
||||||
|
, curl
|
||||||
|
, editline
|
||||||
|
, readline
|
||||||
|
, fileset
|
||||||
|
, flex
|
||||||
|
, git
|
||||||
|
, gtest
|
||||||
|
, jq
|
||||||
|
, doxygen
|
||||||
|
, libarchive
|
||||||
|
, libcpuid
|
||||||
|
, libgit2
|
||||||
|
, libseccomp
|
||||||
|
, libsodium
|
||||||
|
, man
|
||||||
|
, lowdown
|
||||||
|
, mdbook
|
||||||
|
, mdbook-linkcheck
|
||||||
|
, mercurial
|
||||||
|
, openssh
|
||||||
|
, openssl
|
||||||
|
, pkg-config
|
||||||
|
, rapidcheck
|
||||||
|
, sqlite
|
||||||
|
, util-linux
|
||||||
|
, xz
|
||||||
|
|
||||||
|
, busybox-sandbox-shell ? null
|
||||||
|
|
||||||
|
# Configuration Options
|
||||||
|
#:
|
||||||
|
# This probably seems like too many degrees of freedom, but it
|
||||||
|
# faithfully reflects how the underlying configure + make build system
|
||||||
|
# works. The top-level flake.nix will choose useful combinations of these
|
||||||
|
# options to CI.
|
||||||
|
|
||||||
|
, pname ? "nix"
|
||||||
|
|
||||||
|
, versionSuffix ? ""
|
||||||
|
, officialRelease ? false
|
||||||
|
|
||||||
|
# Whether to build Nix. Useful to skip for tasks like (a) just
|
||||||
|
# generating API docs or (b) testing existing pre-built versions of Nix
|
||||||
|
, doBuild ? true
|
||||||
|
|
||||||
|
# Run the unit tests as part of the build. See `installUnitTests` for an
|
||||||
|
# alternative to this.
|
||||||
|
, doCheck ? __forDefaults.canRunInstalled
|
||||||
|
|
||||||
|
# Run the functional tests as part of the build.
|
||||||
|
, doInstallCheck ? test-client != null || __forDefaults.canRunInstalled
|
||||||
|
|
||||||
|
# Check test coverage of Nix. Probably want to use with at least
|
||||||
|
# one of `doCheck` or `doInstallCheck` enabled.
|
||||||
|
, withCoverageChecks ? false
|
||||||
|
|
||||||
|
# Whether to build the regular manual
|
||||||
|
, enableManual ? __forDefaults.canRunInstalled
|
||||||
|
|
||||||
|
# Whether to use garbage collection for the Nix language evaluator.
|
||||||
|
#
|
||||||
|
# If it is disabled, we just leak memory, but this is not as bad as it
|
||||||
|
# sounds so long as evaluation just takes place within short-lived
|
||||||
|
# processes. (When the process exits, the memory is reclaimed; it is
|
||||||
|
# only leaked *within* the process.)
|
||||||
|
, enableGC ? true
|
||||||
|
|
||||||
|
# Whether to enable Markdown rendering in the Nix binary.
|
||||||
|
, enableMarkdown ? !stdenv.hostPlatform.isWindows
|
||||||
|
|
||||||
|
# Which interactive line editor library to use for Nix's repl.
|
||||||
|
#
|
||||||
|
# Currently supported choices are:
|
||||||
|
#
|
||||||
|
# - editline (default)
|
||||||
|
# - readline
|
||||||
|
, readlineFlavor ? if stdenv.hostPlatform.isWindows then "readline" else "editline"
|
||||||
|
|
||||||
|
# Whether to build the internal API docs, can be done separately from
|
||||||
|
# everything else.
|
||||||
|
, enableInternalAPIDocs ? false
|
||||||
|
|
||||||
|
# Whether to install unit tests. This is useful when cross compiling
|
||||||
|
# since we cannot run them natively during the build, but can do so
|
||||||
|
# later.
|
||||||
|
, installUnitTests ? doBuild && !__forDefaults.canExecuteHost
|
||||||
|
|
||||||
|
# For running the functional tests against a pre-built Nix. Probably
|
||||||
|
# want to use in conjunction with `doBuild = false;`.
|
||||||
|
, test-daemon ? null
|
||||||
|
, test-client ? null
|
||||||
|
|
||||||
|
# Avoid setting things that would interfere with a functioning devShell
|
||||||
|
, forDevShell ? false
|
||||||
|
|
||||||
|
# Not a real argument, just the only way to approximate let-binding some
|
||||||
|
# stuff for argument defaults.
|
||||||
|
, __forDefaults ? {
|
||||||
|
canExecuteHost = stdenv.buildPlatform.canExecute stdenv.hostPlatform;
|
||||||
|
canRunInstalled = doBuild && __forDefaults.canExecuteHost;
|
||||||
|
}
|
||||||
|
}:
|
||||||
|
|
||||||
|
let
|
||||||
|
version = lib.fileContents ./.version + versionSuffix;
|
||||||
|
|
||||||
|
# selected attributes with defaults, will be used to define some
|
||||||
|
# things which should instead be gotten via `finalAttrs` in order to
|
||||||
|
# work with overriding.
|
||||||
|
attrs = {
|
||||||
|
inherit doBuild doCheck doInstallCheck;
|
||||||
|
};
|
||||||
|
|
||||||
|
mkDerivation =
|
||||||
|
if withCoverageChecks
|
||||||
|
then
|
||||||
|
# TODO support `finalAttrs` args function in
|
||||||
|
# `releaseTools.coverageAnalysis`.
|
||||||
|
argsFun:
|
||||||
|
releaseTools.coverageAnalysis (let args = argsFun args; in args)
|
||||||
|
else stdenv.mkDerivation;
|
||||||
|
in
|
||||||
|
|
||||||
|
mkDerivation (finalAttrs: let
|
||||||
|
|
||||||
|
inherit (finalAttrs)
|
||||||
|
doCheck
|
||||||
|
doInstallCheck
|
||||||
|
;
|
||||||
|
|
||||||
|
doBuild = !finalAttrs.dontBuild;
|
||||||
|
|
||||||
|
# Either running the unit tests during the build, or installing them
|
||||||
|
# to be run later, requiresthe unit tests to be built.
|
||||||
|
buildUnitTests = doCheck || installUnitTests;
|
||||||
|
|
||||||
|
in {
|
||||||
|
inherit pname version;
|
||||||
|
|
||||||
|
src =
|
||||||
|
let
|
||||||
|
baseFiles = fileset.fileFilter (f: f.name != ".gitignore") ./.;
|
||||||
|
in
|
||||||
|
fileset.toSource {
|
||||||
|
root = ./.;
|
||||||
|
fileset = fileset.intersection baseFiles (fileset.unions ([
|
||||||
|
# For configure
|
||||||
|
./.version
|
||||||
|
./configure.ac
|
||||||
|
./m4
|
||||||
|
# TODO: do we really need README.md? It doesn't seem used in the build.
|
||||||
|
./README.md
|
||||||
|
# For make, regardless of what we are building
|
||||||
|
./local.mk
|
||||||
|
./Makefile
|
||||||
|
./Makefile.config.in
|
||||||
|
./mk
|
||||||
|
(fileset.fileFilter (f: lib.strings.hasPrefix "nix-profile" f.name) ./scripts)
|
||||||
|
] ++ lib.optionals doBuild [
|
||||||
|
./doc
|
||||||
|
./misc
|
||||||
|
./precompiled-headers.h
|
||||||
|
./src
|
||||||
|
./COPYING
|
||||||
|
./scripts/local.mk
|
||||||
|
] ++ lib.optionals buildUnitTests [
|
||||||
|
./doc/manual
|
||||||
|
] ++ lib.optionals enableInternalAPIDocs [
|
||||||
|
./doc/internal-api
|
||||||
|
# Source might not be compiled, but still must be available
|
||||||
|
# for Doxygen to gather comments.
|
||||||
|
./src
|
||||||
|
./tests/unit
|
||||||
|
] ++ lib.optionals buildUnitTests [
|
||||||
|
./tests/unit
|
||||||
|
] ++ lib.optionals doInstallCheck [
|
||||||
|
./tests/functional
|
||||||
|
]));
|
||||||
|
};
|
||||||
|
|
||||||
|
VERSION_SUFFIX = versionSuffix;
|
||||||
|
|
||||||
|
outputs = [ "out" ]
|
||||||
|
++ lib.optional doBuild "dev"
|
||||||
|
# If we are doing just build or just docs, the one thing will use
|
||||||
|
# "out". We only need additional outputs if we are doing both.
|
||||||
|
++ lib.optional (doBuild && (enableManual || enableInternalAPIDocs)) "doc"
|
||||||
|
++ lib.optional installUnitTests "check";
|
||||||
|
|
||||||
|
nativeBuildInputs = [
|
||||||
|
autoconf-archive
|
||||||
|
autoreconfHook
|
||||||
|
pkg-config
|
||||||
|
] ++ lib.optionals doBuild [
|
||||||
|
bison
|
||||||
|
flex
|
||||||
|
] ++ lib.optionals enableManual [
|
||||||
|
(lib.getBin lowdown)
|
||||||
|
mdbook
|
||||||
|
mdbook-linkcheck
|
||||||
|
] ++ lib.optionals doInstallCheck [
|
||||||
|
git
|
||||||
|
mercurial
|
||||||
|
openssh
|
||||||
|
man # for testing `nix-* --help`
|
||||||
|
] ++ lib.optionals (doInstallCheck || enableManual) [
|
||||||
|
jq # Also for custom mdBook preprocessor.
|
||||||
|
] ++ lib.optional stdenv.hostPlatform.isLinux util-linux
|
||||||
|
++ lib.optional enableInternalAPIDocs doxygen
|
||||||
|
;
|
||||||
|
|
||||||
|
buildInputs = lib.optionals doBuild [
|
||||||
|
boost
|
||||||
|
brotli
|
||||||
|
bzip2
|
||||||
|
curl
|
||||||
|
libarchive
|
||||||
|
libgit2
|
||||||
|
libsodium
|
||||||
|
openssl
|
||||||
|
sqlite
|
||||||
|
xz
|
||||||
|
({ inherit readline editline; }.${readlineFlavor})
|
||||||
|
] ++ lib.optionals enableMarkdown [
|
||||||
|
lowdown
|
||||||
|
] ++ lib.optionals buildUnitTests [
|
||||||
|
gtest
|
||||||
|
rapidcheck
|
||||||
|
] ++ lib.optional stdenv.isLinux libseccomp
|
||||||
|
++ lib.optional stdenv.hostPlatform.isx86_64 libcpuid
|
||||||
|
# There have been issues building these dependencies
|
||||||
|
++ lib.optional (stdenv.hostPlatform == stdenv.buildPlatform && (stdenv.isLinux || stdenv.isDarwin))
|
||||||
|
(aws-sdk-cpp.override {
|
||||||
|
apis = ["s3" "transfer"];
|
||||||
|
customMemoryManagement = false;
|
||||||
|
})
|
||||||
|
;
|
||||||
|
|
||||||
|
propagatedBuildInputs = [
|
||||||
|
nlohmann_json
|
||||||
|
] ++ lib.optional enableGC boehmgc;
|
||||||
|
|
||||||
|
dontBuild = !attrs.doBuild;
|
||||||
|
doCheck = attrs.doCheck;
|
||||||
|
|
||||||
|
disallowedReferences = [ boost ];
|
||||||
|
|
||||||
|
preConfigure = lib.optionalString (doBuild && ! stdenv.hostPlatform.isStatic) (
|
||||||
|
''
|
||||||
|
# Copy libboost_context so we don't get all of Boost in our closure.
|
||||||
|
# https://github.com/NixOS/nixpkgs/issues/45462
|
||||||
|
mkdir -p $out/lib
|
||||||
|
cp -pd ${boost}/lib/{libboost_context*,libboost_thread*,libboost_system*} $out/lib
|
||||||
|
rm -f $out/lib/*.a
|
||||||
|
'' + lib.optionalString stdenv.hostPlatform.isLinux ''
|
||||||
|
chmod u+w $out/lib/*.so.*
|
||||||
|
patchelf --set-rpath $out/lib:${stdenv.cc.cc.lib}/lib $out/lib/libboost_thread.so.*
|
||||||
|
'' + lib.optionalString stdenv.hostPlatform.isDarwin ''
|
||||||
|
for LIB in $out/lib/*.dylib; do
|
||||||
|
chmod u+w $LIB
|
||||||
|
install_name_tool -id $LIB $LIB
|
||||||
|
install_name_tool -delete_rpath ${boost}/lib/ $LIB || true
|
||||||
|
done
|
||||||
|
install_name_tool -change ${boost}/lib/libboost_system.dylib $out/lib/libboost_system.dylib $out/lib/libboost_thread.dylib
|
||||||
|
''
|
||||||
|
);
|
||||||
|
|
||||||
|
configureFlags = [
|
||||||
|
(lib.enableFeature doBuild "build")
|
||||||
|
(lib.enableFeature buildUnitTests "unit-tests")
|
||||||
|
(lib.enableFeature doInstallCheck "functional-tests")
|
||||||
|
(lib.enableFeature enableInternalAPIDocs "internal-api-docs")
|
||||||
|
(lib.enableFeature enableManual "doc-gen")
|
||||||
|
(lib.enableFeature enableGC "gc")
|
||||||
|
(lib.enableFeature enableMarkdown "markdown")
|
||||||
|
(lib.enableFeature installUnitTests "install-unit-tests")
|
||||||
|
(lib.withFeatureAs true "readline-flavor" readlineFlavor)
|
||||||
|
] ++ lib.optionals (!forDevShell) [
|
||||||
|
"--sysconfdir=/etc"
|
||||||
|
] ++ lib.optionals installUnitTests [
|
||||||
|
"--with-check-bin-dir=${builtins.placeholder "check"}/bin"
|
||||||
|
"--with-check-lib-dir=${builtins.placeholder "check"}/lib"
|
||||||
|
] ++ lib.optionals (doBuild) [
|
||||||
|
"--with-boost=${boost}/lib"
|
||||||
|
] ++ lib.optionals (doBuild && stdenv.isLinux) [
|
||||||
|
"--with-sandbox-shell=${busybox-sandbox-shell}/bin/busybox"
|
||||||
|
] ++ lib.optional (doBuild && stdenv.isLinux && !(stdenv.hostPlatform.isStatic && stdenv.system == "aarch64-linux"))
|
||||||
|
"LDFLAGS=-fuse-ld=gold"
|
||||||
|
++ lib.optional (doBuild && stdenv.hostPlatform.isStatic) "--enable-embedded-sandbox-shell"
|
||||||
|
;
|
||||||
|
|
||||||
|
enableParallelBuilding = true;
|
||||||
|
|
||||||
|
makeFlags = "profiledir=$(out)/etc/profile.d PRECOMPILE_HEADERS=1";
|
||||||
|
|
||||||
|
installTargets = lib.optional doBuild "install"
|
||||||
|
++ lib.optional enableInternalAPIDocs "internal-api-html";
|
||||||
|
|
||||||
|
installFlags = "sysconfdir=$(out)/etc";
|
||||||
|
|
||||||
|
# In this case we are probably just running tests, and so there isn't
|
||||||
|
# anything to install, we just make an empty directory to signify tests
|
||||||
|
# succeeded.
|
||||||
|
installPhase = if finalAttrs.installTargets != [] then null else ''
|
||||||
|
mkdir -p $out
|
||||||
|
'';
|
||||||
|
|
||||||
|
postInstall = lib.optionalString doBuild (
|
||||||
|
lib.optionalString stdenv.hostPlatform.isStatic ''
|
||||||
|
mkdir -p $out/nix-support
|
||||||
|
echo "file binary-dist $out/bin/nix" >> $out/nix-support/hydra-build-products
|
||||||
|
'' + lib.optionalString stdenv.isDarwin ''
|
||||||
|
install_name_tool \
|
||||||
|
-change ${boost}/lib/libboost_context.dylib \
|
||||||
|
$out/lib/libboost_context.dylib \
|
||||||
|
$out/lib/libnixutil.dylib
|
||||||
|
''
|
||||||
|
) + lib.optionalString enableManual ''
|
||||||
|
mkdir -p ''${!outputDoc}/nix-support
|
||||||
|
echo "doc manual ''${!outputDoc}/share/doc/nix/manual" >> ''${!outputDoc}/nix-support/hydra-build-products
|
||||||
|
'' + lib.optionalString enableInternalAPIDocs ''
|
||||||
|
mkdir -p ''${!outputDoc}/nix-support
|
||||||
|
echo "doc internal-api-docs $out/share/doc/nix/internal-api/html" >> ''${!outputDoc}/nix-support/hydra-build-products
|
||||||
|
'';
|
||||||
|
|
||||||
|
doInstallCheck = attrs.doInstallCheck;
|
||||||
|
|
||||||
|
installCheckFlags = "sysconfdir=$(out)/etc";
|
||||||
|
# Work around buggy detection in stdenv.
|
||||||
|
installCheckTarget = "installcheck";
|
||||||
|
|
||||||
|
# Work around weird bug where it doesn't think there is a Makefile.
|
||||||
|
installCheckPhase = if (!doBuild && doInstallCheck) then ''
|
||||||
|
runHook preInstallCheck
|
||||||
|
mkdir -p src/nix-channel
|
||||||
|
make installcheck -j$NIX_BUILD_CORES -l$NIX_BUILD_CORES
|
||||||
|
'' else null;
|
||||||
|
|
||||||
|
# Needed for tests if we are not doing a build, but testing existing
|
||||||
|
# built Nix.
|
||||||
|
preInstallCheck =
|
||||||
|
lib.optionalString (! doBuild) ''
|
||||||
|
mkdir -p src/nix-channel
|
||||||
|
''
|
||||||
|
# See https://github.com/NixOS/nix/issues/2523
|
||||||
|
# Occurs often in tests since https://github.com/NixOS/nix/pull/9900
|
||||||
|
+ lib.optionalString stdenv.hostPlatform.isDarwin ''
|
||||||
|
export OBJC_DISABLE_INITIALIZE_FORK_SAFETY=YES
|
||||||
|
'';
|
||||||
|
|
||||||
|
separateDebugInfo = !stdenv.hostPlatform.isStatic;
|
||||||
|
|
||||||
|
# TODO `releaseTools.coverageAnalysis` in Nixpkgs needs to be updated
|
||||||
|
# to work with `strictDeps`.
|
||||||
|
strictDeps = !withCoverageChecks;
|
||||||
|
|
||||||
|
hardeningDisable = lib.optional stdenv.hostPlatform.isStatic "pie";
|
||||||
|
|
||||||
|
meta = {
|
||||||
|
platforms = lib.platforms.unix ++ lib.platforms.windows;
|
||||||
|
mainProgram = "nix";
|
||||||
|
broken = !(lib.all (a: a) [
|
||||||
|
# We cannot run or install unit tests if we don't build them or
|
||||||
|
# Nix proper (which they depend on).
|
||||||
|
(installUnitTests -> doBuild)
|
||||||
|
(doCheck -> doBuild)
|
||||||
|
# The build process for the manual currently requires extracting
|
||||||
|
# data from the Nix executable we are trying to document.
|
||||||
|
(enableManual -> doBuild)
|
||||||
|
]);
|
||||||
|
};
|
||||||
|
|
||||||
|
} // lib.optionalAttrs withCoverageChecks {
|
||||||
|
lcovFilter = [ "*/boost/*" "*-tab.*" ];
|
||||||
|
|
||||||
|
hardeningDisable = ["fortify"];
|
||||||
|
|
||||||
|
NIX_CFLAGS_COMPILE = "-DCOVERAGE=1";
|
||||||
|
|
||||||
|
dontInstall = false;
|
||||||
|
} // lib.optionalAttrs (test-daemon != null) {
|
||||||
|
NIX_DAEMON_PACKAGE = test-daemon;
|
||||||
|
} // lib.optionalAttrs (test-client != null) {
|
||||||
|
NIX_CLIENT_PACKAGE = test-client;
|
||||||
|
})
|
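As a point of reference, here is a minimal sketch of how a package function with this argument list might be instantiated from outside the flake. The call site itself (use of nixpkgs' callPackage, and the option values chosen) is an illustration, not part of this change; only the argument names come from the file above.

# Hypothetical call site; argument names mirror package.nix above.
let
  pkgs = import <nixpkgs> { };
in
pkgs.callPackage ./package.nix {
  # `fileset` is not a top-level nixpkgs attribute, so it must be passed explicitly.
  inherit (pkgs.lib) fileset;
  # Assumption: plain busybox stands in for the dedicated sandbox-shell build.
  busybox-sandbox-shell = pkgs.busybox;
  doBuild = true;
  doCheck = false;       # skip unit tests during the build
  enableManual = false;  # skip building the manual
}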
2 perl/.yath.rc Normal file

@@ -0,0 +1,2 @@
[test]
-I=rel(lib/Nix)

@@ -5,12 +5,12 @@
 , nix, curl, bzip2, xz, boost, libsodium, darwin
 }:

-perl.pkgs.toPerlModule (stdenv.mkDerivation {
+perl.pkgs.toPerlModule (stdenv.mkDerivation (finalAttrs: {
   name = "nix-perl-${nix.version}";

   src = fileset.toSource {
     root = ../.;
-    fileset = fileset.unions [
+    fileset = fileset.unions ([
       ../.version
       ../m4
       ../mk
@@ -20,7 +20,10 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation {
       ./configure.ac
       ./lib
       ./local.mk
-    ];
+    ] ++ lib.optionals finalAttrs.doCheck [
+      ./.yath.rc
+      ./t
+    ]);
   };

   nativeBuildInputs =
@@ -40,6 +43,13 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation {
     ++ lib.optional (stdenv.isLinux || stdenv.isDarwin) libsodium
     ++ lib.optional stdenv.isDarwin darwin.apple_sdk.frameworks.Security;

+  # `perlPackages.Test2Harness` is marked broken for Darwin
+  doCheck = !stdenv.isDarwin;
+
+  nativeCheckInputs = [
+    perlPackages.Test2Harness
+  ];
+
   configureFlags = [
     "--with-dbi=${perlPackages.DBI}/${perl.libPrefix}"
     "--with-dbd-sqlite=${perlPackages.DBDSQLite}/${perl.libPrefix}"
@@ -48,4 +58,4 @@ perl.pkgs.toPerlModule (stdenv.mkDerivation {
   enableParallelBuilding = true;

   postUnpack = "sourceRoot=$sourceRoot/perl";
-})
+}))

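A brief sketch of calling the Perl bindings derivation changed above (its header suggests it lives at perl/default.nix); the wiring below is assumed for illustration, only the parameter names come from the file itself.

# Illustrative only: instantiate the Perl bindings against a given Nix build.
{ pkgs ? import <nixpkgs> { }, nix }:
pkgs.callPackage ./perl {
  inherit nix;                  # the Nix derivation the bindings link against
  inherit (pkgs.lib) fileset;   # not a top-level nixpkgs attribute, pass it in
}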
@@ -12,17 +12,20 @@ our %EXPORT_TAGS = ( 'all' => [ qw( ) ] );
 our @EXPORT_OK = ( @{ $EXPORT_TAGS{'all'} } );

 our @EXPORT = qw(
-    setVerbosity
-    isValidPath queryReferences queryPathInfo queryDeriver queryPathHash
-    queryPathFromHashPart
-    topoSortPaths computeFSClosure followLinksToStorePath exportPaths importPaths
+    StoreWrapper
+    StoreWrapper::new
+    StoreWrapper::isValidPath StoreWrapper::queryReferences StoreWrapper::queryPathInfo StoreWrapper::queryDeriver StoreWrapper::queryPathHash
+    StoreWrapper::queryPathFromHashPart
+    StoreWrapper::topoSortPaths StoreWrapper::computeFSClosure followLinksToStorePath StoreWrapper::exportPaths StoreWrapper::importPaths
+    StoreWrapper::addToStore StoreWrapper::makeFixedOutputPath
+    StoreWrapper::derivationFromPath
+    StoreWrapper::addTempRoot
+    StoreWrapper::queryRawRealisation
+
     hashPath hashFile hashString convertHash
     signString checkSignature
-    addToStore makeFixedOutputPath
-    derivationFromPath
-    addTempRoot
     getBinDir getStoreDir
-    queryRawRealisation
+    setVerbosity
 );

 our $VERSION = '0.15';

@@ -12,52 +12,66 @@
 #include "realisation.hh"
 #include "globals.hh"
 #include "store-api.hh"
-#include "crypto.hh"
+#include "posix-source-accessor.hh"

 #include <sodium.h>
 #include <nlohmann/json.hpp>


 using namespace nix;

+static bool libStoreInitialized = false;

-static ref<Store> store()
-{
-    static std::shared_ptr<Store> _store;
-    if (!_store) {
-        try {
-            initLibStore();
-            _store = openStore();
-        } catch (Error & e) {
-            croak("%s", e.what());
-        }
-    }
-    return ref<Store>(_store);
-}
-
-
+struct StoreWrapper {
+    ref<Store> store;
+};

 MODULE = Nix::Store PACKAGE = Nix::Store
 PROTOTYPES: ENABLE

+TYPEMAP: <<HERE
+StoreWrapper *  O_OBJECT
+
+OUTPUT
+O_OBJECT
+    sv_setref_pv( $arg, CLASS, (void*)$var );
+
+INPUT
+O_OBJECT
+    if ( sv_isobject($arg) && (SvTYPE(SvRV($arg)) == SVt_PVMG) ) {
+        $var = ($type)SvIV((SV*)SvRV( $arg ));
+    }
+    else {
+        warn( \"${Package}::$func_name() -- \"
+              \"$var not a blessed SV reference\");
+        XSRETURN_UNDEF;
+    }
+HERE
+
 #undef dNOOP // Hack to work around "error: declaration of 'Perl___notused' has a different language linkage" error message on clang.
 #define dNOOP

-void init()
-    CODE:
-        store();
-
-
-void setVerbosity(int level)
-    CODE:
-        verbosity = (Verbosity) level;
-
-
-int isValidPath(char * path)
+void
+StoreWrapper::DESTROY()
+
+StoreWrapper *
+StoreWrapper::new(char * s = nullptr)
     CODE:
+        static std::shared_ptr<Store> _store;
         try {
-            RETVAL = store()->isValidPath(store()->parseStorePath(path));
+            if (!libStoreInitialized) {
+                initLibStore();
+                libStoreInitialized = true;
+            }
+            if (items == 1) {
+                _store = openStore();
+                RETVAL = new StoreWrapper {
+                    .store = ref<Store>{_store}
+                };
+            } else {
+                RETVAL = new StoreWrapper {
+                    .store = openStore(s)
+                };
+            }
         } catch (Error & e) {
             croak("%s", e.what());
         }

@@ -65,52 +79,81 @@ int isValidPath(char * path)
         RETVAL


-SV * queryReferences(char * path)
+void init()
+    CODE:
+        if (!libStoreInitialized) {
+            initLibStore();
+            libStoreInitialized = true;
+        }
+
+
+void setVerbosity(int level)
+    CODE:
+        verbosity = (Verbosity) level;
+
+
+int
+StoreWrapper::isValidPath(char * path)
+    CODE:
+        try {
+            RETVAL = THIS->store->isValidPath(THIS->store->parseStorePath(path));
+        } catch (Error & e) {
+            croak("%s", e.what());
+        }
+    OUTPUT:
+        RETVAL
+
+
+SV *
+StoreWrapper::queryReferences(char * path)
     PPCODE:
         try {
-            for (auto & i : store()->queryPathInfo(store()->parseStorePath(path))->references)
-                XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0)));
+            for (auto & i : THIS->store->queryPathInfo(THIS->store->parseStorePath(path))->references)
+                XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
         }


-SV * queryPathHash(char * path)
+SV *
+StoreWrapper::queryPathHash(char * path)
     PPCODE:
         try {
-            auto s = store()->queryPathInfo(store()->parseStorePath(path))->narHash.to_string(HashFormat::Base32, true);
+            auto s = THIS->store->queryPathInfo(THIS->store->parseStorePath(path))->narHash.to_string(HashFormat::Nix32, true);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
         }


-SV * queryDeriver(char * path)
+SV *
+StoreWrapper::queryDeriver(char * path)
     PPCODE:
         try {
-            auto info = store()->queryPathInfo(store()->parseStorePath(path));
+            auto info = THIS->store->queryPathInfo(THIS->store->parseStorePath(path));
             if (!info->deriver) XSRETURN_UNDEF;
-            XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
+            XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(*info->deriver).c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
         }


-SV * queryPathInfo(char * path, int base32)
+SV *
+StoreWrapper::queryPathInfo(char * path, int base32)
     PPCODE:
         try {
-            auto info = store()->queryPathInfo(store()->parseStorePath(path));
+            auto info = THIS->store->queryPathInfo(THIS->store->parseStorePath(path));
             if (!info->deriver)
                 XPUSHs(&PL_sv_undef);
             else
-                XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(*info->deriver).c_str(), 0)));
-            auto s = info->narHash.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, true);
+                XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(*info->deriver).c_str(), 0)));
+            auto s = info->narHash.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, true);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
             mXPUSHi(info->registrationTime);
             mXPUSHi(info->narSize);
             AV * refs = newAV();
             for (auto & i : info->references)
-                av_push(refs, newSVpv(store()->printStorePath(i).c_str(), 0));
+                av_push(refs, newSVpv(THIS->store->printStorePath(i).c_str(), 0));
             XPUSHs(sv_2mortal(newRV((SV *) refs)));
             AV * sigs = newAV();
             for (auto & i : info->sigs)

@@ -120,10 +163,11 @@ SV * queryPathInfo(char * path, int base32)
             croak("%s", e.what());
         }

-SV * queryRawRealisation(char * outputId)
+SV *
+StoreWrapper::queryRawRealisation(char * outputId)
     PPCODE:
         try {
-            auto realisation = store()->queryRealisation(DrvOutput::parse(outputId));
+            auto realisation = THIS->store->queryRealisation(DrvOutput::parse(outputId));
             if (realisation)
                 XPUSHs(sv_2mortal(newSVpv(realisation->toJSON().dump().c_str(), 0)));
             else

@@ -133,46 +177,50 @@ SV * queryRawRealisation(char * outputId)
         }


-SV * queryPathFromHashPart(char * hashPart)
+SV *
+StoreWrapper::queryPathFromHashPart(char * hashPart)
     PPCODE:
         try {
-            auto path = store()->queryPathFromHashPart(hashPart);
-            XPUSHs(sv_2mortal(newSVpv(path ? store()->printStorePath(*path).c_str() : "", 0)));
+            auto path = THIS->store->queryPathFromHashPart(hashPart);
+            XPUSHs(sv_2mortal(newSVpv(path ? THIS->store->printStorePath(*path).c_str() : "", 0)));
        } catch (Error & e) {
             croak("%s", e.what());
         }


-SV * computeFSClosure(int flipDirection, int includeOutputs, ...)
+SV *
+StoreWrapper::computeFSClosure(int flipDirection, int includeOutputs, ...)
     PPCODE:
         try {
             StorePathSet paths;
             for (int n = 2; n < items; ++n)
-                store()->computeFSClosure(store()->parseStorePath(SvPV_nolen(ST(n))), paths, flipDirection, includeOutputs);
+                THIS->store->computeFSClosure(THIS->store->parseStorePath(SvPV_nolen(ST(n))), paths, flipDirection, includeOutputs);
             for (auto & i : paths)
-                XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0)));
+                XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
         }


-SV * topoSortPaths(...)
+SV *
+StoreWrapper::topoSortPaths(...)
     PPCODE:
         try {
             StorePathSet paths;
-            for (int n = 0; n < items; ++n) paths.insert(store()->parseStorePath(SvPV_nolen(ST(n))));
-            auto sorted = store()->topoSortPaths(paths);
+            for (int n = 0; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n))));
+            auto sorted = THIS->store->topoSortPaths(paths);
             for (auto & i : sorted)
-                XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(i).c_str(), 0)));
+                XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(i).c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
         }


-SV * followLinksToStorePath(char * path)
+SV *
+StoreWrapper::followLinksToStorePath(char * path)
     CODE:
         try {
-            RETVAL = newSVpv(store()->printStorePath(store()->followLinksToStorePath(path)).c_str(), 0);
+            RETVAL = newSVpv(THIS->store->printStorePath(THIS->store->followLinksToStorePath(path)).c_str(), 0);
         } catch (Error & e) {
             croak("%s", e.what());
         }

@@ -180,33 +228,39 @@ SV * followLinksToStorePath(char * path)
         RETVAL


-void exportPaths(int fd, ...)
+void
+StoreWrapper::exportPaths(int fd, ...)
     PPCODE:
         try {
             StorePathSet paths;
-            for (int n = 1; n < items; ++n) paths.insert(store()->parseStorePath(SvPV_nolen(ST(n))));
+            for (int n = 1; n < items; ++n) paths.insert(THIS->store->parseStorePath(SvPV_nolen(ST(n))));
             FdSink sink(fd);
-            store()->exportPaths(paths, sink);
+            THIS->store->exportPaths(paths, sink);
         } catch (Error & e) {
             croak("%s", e.what());
         }


-void importPaths(int fd, int dontCheckSigs)
+void
+StoreWrapper::importPaths(int fd, int dontCheckSigs)
     PPCODE:
         try {
             FdSource source(fd);
-            store()->importPaths(source, dontCheckSigs ? NoCheckSigs : CheckSigs);
+            THIS->store->importPaths(source, dontCheckSigs ? NoCheckSigs : CheckSigs);
         } catch (Error & e) {
             croak("%s", e.what());
         }


-SV * hashPath(char * algo, int base32, char * path)
+SV *
+hashPath(char * algo, int base32, char * path)
     PPCODE:
         try {
-            Hash h = hashPath(parseHashType(algo), path).first;
-            auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
+            auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(path);
+            Hash h = hashPath(
+                accessor, canonPath,
+                FileIngestionMethod::Recursive, parseHashAlgo(algo));
+            auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());

@@ -216,8 +270,8 @@ SV * hashPath(char * algo, int base32, char * path)
 SV * hashFile(char * algo, int base32, char * path)
     PPCODE:
         try {
-            Hash h = hashFile(parseHashType(algo), path);
-            auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
+            Hash h = hashFile(parseHashAlgo(algo), path);
+            auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());

@@ -227,8 +281,8 @@ SV * hashFile(char * algo, int base32, char * path)
 SV * hashString(char * algo, int base32, char * s)
     PPCODE:
         try {
-            Hash h = hashString(parseHashType(algo), s);
-            auto s = h.to_string(base32 ? HashFormat::Base32 : HashFormat::Base16, false);
+            Hash h = hashString(parseHashAlgo(algo), s);
+            auto s = h.to_string(base32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());

@@ -238,8 +292,8 @@ SV * hashString(char * algo, int base32, char * s)
 SV * convertHash(char * algo, char * s, int toBase32)
     PPCODE:
         try {
-            auto h = Hash::parseAny(s, parseHashType(algo));
-            auto s = h.to_string(toBase32 ? HashFormat::Base32 : HashFormat::Base16, false);
+            auto h = Hash::parseAny(s, parseHashAlgo(algo));
+            auto s = h.to_string(toBase32 ? HashFormat::Nix32 : HashFormat::Base16, false);
             XPUSHs(sv_2mortal(newSVpv(s.c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());

@@ -277,60 +331,67 @@ int checkSignature(SV * publicKey_, SV * sig_, char * msg)
         RETVAL


-SV * addToStore(char * srcPath, int recursive, char * algo)
+SV *
+StoreWrapper::addToStore(char * srcPath, int recursive, char * algo)
     PPCODE:
         try {
             auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
-            auto path = store()->addToStore(std::string(baseNameOf(srcPath)), srcPath, method, parseHashType(algo));
-            XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
+            auto [accessor, canonPath] = PosixSourceAccessor::createAtRoot(srcPath);
+            auto path = THIS->store->addToStore(
+                std::string(baseNameOf(srcPath)),
+                accessor, canonPath,
+                method, parseHashAlgo(algo));
+            XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
         }


-SV * makeFixedOutputPath(int recursive, char * algo, char * hash, char * name)
+SV *
+StoreWrapper::makeFixedOutputPath(int recursive, char * algo, char * hash, char * name)
     PPCODE:
         try {
-            auto h = Hash::parseAny(hash, parseHashType(algo));
+            auto h = Hash::parseAny(hash, parseHashAlgo(algo));
             auto method = recursive ? FileIngestionMethod::Recursive : FileIngestionMethod::Flat;
-            auto path = store()->makeFixedOutputPath(name, FixedOutputInfo {
+            auto path = THIS->store->makeFixedOutputPath(name, FixedOutputInfo {
                 .method = method,
                 .hash = h,
                 .references = {},
             });
-            XPUSHs(sv_2mortal(newSVpv(store()->printStorePath(path).c_str(), 0)));
+            XPUSHs(sv_2mortal(newSVpv(THIS->store->printStorePath(path).c_str(), 0)));
         } catch (Error & e) {
             croak("%s", e.what());
         }


-SV * derivationFromPath(char * drvPath)
+SV *
+StoreWrapper::derivationFromPath(char * drvPath)
     PREINIT:
         HV *hash;
     CODE:
         try {
-            Derivation drv = store()->derivationFromPath(store()->parseStorePath(drvPath));
+            Derivation drv = THIS->store->derivationFromPath(THIS->store->parseStorePath(drvPath));
             hash = newHV();

             HV * outputs = newHV();
-            for (auto & i : drv.outputsAndOptPaths(*store())) {
+            for (auto & i : drv.outputsAndOptPaths(*THIS->store)) {
                 hv_store(
                     outputs, i.first.c_str(), i.first.size(),
                     !i.second.second
                         ? newSV(0) /* null value */
-                        : newSVpv(store()->printStorePath(*i.second.second).c_str(), 0),
+                        : newSVpv(THIS->store->printStorePath(*i.second.second).c_str(), 0),
                     0);
             }
             hv_stores(hash, "outputs", newRV((SV *) outputs));

             AV * inputDrvs = newAV();
             for (auto & i : drv.inputDrvs.map)
-                av_push(inputDrvs, newSVpv(store()->printStorePath(i.first).c_str(), 0)); // !!! ignores i->second
+                av_push(inputDrvs, newSVpv(THIS->store->printStorePath(i.first).c_str(), 0)); // !!! ignores i->second
             hv_stores(hash, "inputDrvs", newRV((SV *) inputDrvs));

             AV * inputSrcs = newAV();
             for (auto & i : drv.inputSrcs)
-                av_push(inputSrcs, newSVpv(store()->printStorePath(i).c_str(), 0));
+                av_push(inputSrcs, newSVpv(THIS->store->printStorePath(i).c_str(), 0));
             hv_stores(hash, "inputSrcs", newRV((SV *) inputSrcs));

             hv_stores(hash, "platform", newSVpv(drv.platform.c_str(), 0));

@@ -354,10 +415,11 @@ SV * derivationFromPath(char * drvPath)
         RETVAL


-void addTempRoot(char * storePath)
+void
+StoreWrapper::addTempRoot(char * storePath)
     PPCODE:
         try {
-            store()->addTempRoot(store()->parseStorePath(storePath));
+            THIS->store->addTempRoot(THIS->store->parseStorePath(storePath));
         } catch (Error & e) {
             croak("%s", e.what());
         }

@@ -41,3 +41,6 @@ Store_FORCE_INSTALL = 1
 Store_INSTALL_DIR = $(perllibdir)/auto/Nix/Store

 clean-files += lib/Nix/Config.pm lib/Nix/Store.cc Makefile.config
+
+check: all
+	yath test

13 perl/t/init.t Normal file

@@ -0,0 +1,13 @@
use strict;
use warnings;
use Test2::V0;

use Nix::Store;

my $s = new Nix::Store("dummy://");

my $res = $s->isValidPath("/nix/store/g1w7hy3qg1w7hy3qg1w7hy3qg1w7hy3q-bar");

ok(!$res, "should not have path");

done_testing;

84 scripts/binary-tarball.nix Normal file

@@ -0,0 +1,84 @@
{ runCommand
, system
, buildPackages
, cacert
, nix
}:

let

  installerClosureInfo = buildPackages.closureInfo {
    rootPaths = [ nix cacert ];
  };

  inherit (nix) version;

  env = {
    #nativeBuildInputs = lib.optional (system != "aarch64-linux") shellcheck;
    meta.description = "Distribution-independent Nix bootstrap binaries for ${system}";
  };

in

runCommand "nix-binary-tarball-${version}" env ''
  cp ${installerClosureInfo}/registration $TMPDIR/reginfo
  cp ${./create-darwin-volume.sh} $TMPDIR/create-darwin-volume.sh
  substitute ${./install-nix-from-closure.sh} $TMPDIR/install \
    --subst-var-by nix ${nix} \
    --subst-var-by cacert ${cacert}

  substitute ${./install-darwin-multi-user.sh} $TMPDIR/install-darwin-multi-user.sh \
    --subst-var-by nix ${nix} \
    --subst-var-by cacert ${cacert}
  substitute ${./install-systemd-multi-user.sh} $TMPDIR/install-systemd-multi-user.sh \
    --subst-var-by nix ${nix} \
    --subst-var-by cacert ${cacert}
  substitute ${./install-multi-user.sh} $TMPDIR/install-multi-user \
    --subst-var-by nix ${nix} \
    --subst-var-by cacert ${cacert}

  if type -p shellcheck; then
    # SC1090: Don't worry about not being able to find
    #         $nix/etc/profile.d/nix.sh
    shellcheck --exclude SC1090 $TMPDIR/install
    shellcheck $TMPDIR/create-darwin-volume.sh
    shellcheck $TMPDIR/install-darwin-multi-user.sh
    shellcheck $TMPDIR/install-systemd-multi-user.sh

    # SC1091: Don't panic about not being able to source
    #         /etc/profile
    # SC2002: Ignore "useless cat" "error", when loading
    #         .reginfo, as the cat is a much cleaner
    #         implementation, even though it is "useless"
    # SC2116: Allow ROOT_HOME=$(echo ~root) for resolving
    #         root's home directory
    shellcheck --external-sources \
      --exclude SC1091,SC2002,SC2116 $TMPDIR/install-multi-user
  fi

  chmod +x $TMPDIR/install
  chmod +x $TMPDIR/create-darwin-volume.sh
  chmod +x $TMPDIR/install-darwin-multi-user.sh
  chmod +x $TMPDIR/install-systemd-multi-user.sh
  chmod +x $TMPDIR/install-multi-user
  dir=nix-${version}-${system}
  fn=$out/$dir.tar.xz
  mkdir -p $out/nix-support
  echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
  tar cvfJ $fn \
    --owner=0 --group=0 --mode=u+rw,uga+r \
    --mtime='1970-01-01' \
    --absolute-names \
    --hard-dereference \
    --transform "s,$TMPDIR/install,$dir/install," \
    --transform "s,$TMPDIR/create-darwin-volume.sh,$dir/create-darwin-volume.sh," \
    --transform "s,$TMPDIR/reginfo,$dir/.reginfo," \
    --transform "s,$NIX_STORE,$dir/store,S" \
    $TMPDIR/install \
    $TMPDIR/create-darwin-volume.sh \
    $TMPDIR/install-darwin-multi-user.sh \
    $TMPDIR/install-systemd-multi-user.sh \
    $TMPDIR/install-multi-user \
    $TMPDIR/reginfo \
    $(cat ${installerClosureInfo}/store-paths)
''
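A minimal sketch of instantiating this tarball builder; in the real tree the wiring happens in the flake, so the callPackage form and the choice of pkgs.nix below are assumptions for illustration only.

# Illustrative call; any Nix derivation with a matching closure works in principle.
{ pkgs ? import <nixpkgs> { } }:
pkgs.callPackage ./scripts/binary-tarball.nix {
  # `runCommand`, `system`, `buildPackages` and `cacert` are filled in by callPackage;
  # which Nix build to bundle is an assumption here.
  nix = pkgs.nix;
}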
@@ -3,11 +3,13 @@
 set -eu
 set -o pipefail

+# System specific settings
+export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-301}"
+export NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d"
+
 readonly NIX_DAEMON_DEST=/Library/LaunchDaemons/org.nixos.nix-daemon.plist
 # create by default; set 0 to DIY, use a symlink, etc.
 readonly NIX_VOLUME_CREATE=${NIX_VOLUME_CREATE:-1} # now default
-NIX_FIRST_BUILD_UID="301"
-NIX_BUILD_USER_NAME_TEMPLATE="_nixbld%d"

 # caution: may update times on / if not run as normal non-root user
 read_only_root() {

@@ -100,7 +102,7 @@ poly_extra_try_me_commands() {
 poly_configure_nix_daemon_service() {
     task "Setting up the nix-daemon LaunchDaemon"
     _sudo "to set up the nix-daemon as a LaunchDaemon" \
-        /usr/bin/install -m -rw-r--r-- "/nix/var/nix/profiles/default$NIX_DAEMON_DEST" "$NIX_DAEMON_DEST"
+        /usr/bin/install -m "u=rw,go=r" "/nix/var/nix/profiles/default$NIX_DAEMON_DEST" "$NIX_DAEMON_DEST"

     _sudo "to load the LaunchDaemon plist for nix-daemon" \
         launchctl load /Library/LaunchDaemons/org.nixos.nix-daemon.plist

@@ -25,9 +25,9 @@ readonly RED='\033[31m'
 readonly NIX_USER_COUNT=${NIX_USER_COUNT:-32}
 readonly NIX_BUILD_GROUP_ID="${NIX_BUILD_GROUP_ID:-30000}"
 readonly NIX_BUILD_GROUP_NAME="nixbld"
-# darwin installer needs to override these
-NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-30001}"
-NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d"
+# each system specific installer must set these:
+# NIX_FIRST_BUILD_UID
+# NIX_BUILD_USER_NAME_TEMPLATE
 # Please don't change this. We don't support it, because the
 # default shell profile that comes with Nix doesn't support it.
 readonly NIX_ROOT="/nix"

@@ -707,6 +707,12 @@ EOF
     fi
 }

+check_required_system_specific_settings() {
+    if [ -z "${NIX_FIRST_BUILD_UID+x}" ] || [ -z "${NIX_BUILD_USER_NAME_TEMPLATE+x}" ]; then
+        failure "Internal error: System specific installer for $(uname) ($1) does not export required settings."
+    fi
+}
+
 welcome_to_nix() {
     local -r NIX_UID_RANGES="${NIX_FIRST_BUILD_UID}..$((NIX_FIRST_BUILD_UID + NIX_USER_COUNT - 1))"
     local -r RANGE_TEXT=$(echo -ne "${BLUE}(uids [${NIX_UID_RANGES}])${ESC}")

@@ -726,7 +732,9 @@ manager. This will happen in a few stages:
    if you are ready to continue.

 3. Create the system users ${RANGE_TEXT} and groups ${GROUP_TEXT}
-   that the Nix daemon uses to run builds.
+   that the Nix daemon uses to run builds. To create system users
+   in a different range, exit and run this tool again with
+   NIX_FIRST_BUILD_UID set.

 4. Perform the basic installation of the Nix files daemon.

@@ -968,13 +976,16 @@ main() {
     if is_os_darwin; then
         # shellcheck source=./install-darwin-multi-user.sh
         . "$EXTRACTED_NIX_PATH/install-darwin-multi-user.sh"
+        check_required_system_specific_settings "install-darwin-multi-user.sh"
     elif is_os_linux; then
         # shellcheck source=./install-systemd-multi-user.sh
         . "$EXTRACTED_NIX_PATH/install-systemd-multi-user.sh" # most of this works on non-systemd distros also
+        check_required_system_specific_settings "install-systemd-multi-user.sh"
     else
         failure "Sorry, I don't know what to do on $(uname)"
     fi


     welcome_to_nix

     if ! is_root; then

@@ -3,6 +3,10 @@
 set -eu
 set -o pipefail

+# System specific settings
+export NIX_FIRST_BUILD_UID="${NIX_FIRST_BUILD_UID:-30001}"
+export NIX_BUILD_USER_NAME_TEMPLATE="nixbld%d"
+
 readonly SERVICE_SRC=/lib/systemd/system/nix-daemon.service
 readonly SERVICE_DEST=/etc/systemd/system/nix-daemon.service

36 scripts/installer.nix Normal file

@@ -0,0 +1,36 @@
{ lib
, runCommand
, nix
, tarballs
}:

runCommand "installer-script" {
  buildInputs = [ nix ];
} ''
  mkdir -p $out/nix-support

  # Converts /nix/store/50p3qk8k...-nix-2.4pre20201102_550e11f/bin/nix to 50p3qk8k.../bin/nix.
  tarballPath() {
    # Remove the store prefix
    local path=''${1#${builtins.storeDir}/}
    # Get the path relative to the derivation root
    local rest=''${path#*/}
    # Get the derivation hash
    local drvHash=''${path%%-*}
    echo "$drvHash/$rest"
  }

  substitute ${./install.in} $out/install \
    ${lib.concatMapStrings
      (tarball: let
          inherit (tarball.stdenv.hostPlatform) system;
        in '' \
        --replace '@tarballHash_${system}@' $(nix --experimental-features nix-command hash-file --base16 --type sha256 ${tarball}/*.tar.xz) \
        --replace '@tarballPath_${system}@' $(tarballPath ${tarball}/*.tar.xz) \
        ''
      )
      tarballs
    } --replace '@nixVersion@' ${nix.version}

  echo "file installer $out/install" >> $out/nix-support/hydra-build-products
''
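Similarly, a hedged sketch of feeding per-system tarballs into the installer-script builder above; the helper name and the systems in the list are placeholders, not part of this change.

# Illustrative: binaryTarballFor is a hypothetical helper returning the
# output of scripts/binary-tarball.nix for a given system.
{ pkgs ? import <nixpkgs> { }, binaryTarballFor }:
pkgs.callPackage ./scripts/installer.nix {
  tarballs = [
    (binaryTarballFor "x86_64-linux")
    (binaryTarballFor "aarch64-linux")
  ];
}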
@@ -28,7 +28,7 @@
 end

 # Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
-if test -n "$NIX_SSH_CERT_FILE"
+if test -n "$NIX_SSL_CERT_FILE"
     : # Allow users to override the NIX_SSL_CERT_FILE
 else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch
     set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt

@@ -44,7 +44,7 @@ else if test -e "$NIX_LINK/etc/ca-bundle.crt" # old cacert in Nix profile
     set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ca-bundle.crt"
 else
     # Fall back to what is in the nix profiles, favouring whatever is defined last.
-    for i in $NIX_PROFILES
+    for i in (string split ' ' $NIX_PROFILES)
         if test -e "$i/etc/ssl/certs/ca-bundle.crt"
             set --export NIX_SSL_CERT_FILE "$i/etc/ssl/certs/ca-bundle.crt"
         end

@@ -137,11 +137,8 @@ static int main_build_remote(int argc, char * * argv)
         for (auto & m : machines) {
             debug("considering building on remote machine '%s'", m.storeUri);

-            if (m.enabled
-                && (neededSystem == "builtin"
-                    || std::find(m.systemTypes.begin(),
-                        m.systemTypes.end(),
-                        neededSystem) != m.systemTypes.end()) &&
+            if (m.enabled &&
+                m.systemSupported(neededSystem) &&
                 m.allSupported(requiredFeatures) &&
                 m.mandatoryMet(requiredFeatures))
             {

@@ -205,7 +202,7 @@ static int main_build_remote(int argc, char * * argv)
             else
                 drvstr = "<unknown>";

-            auto error = hintformat(errorText);
+            auto error = HintFmt(errorText);
             error
                 % drvstr
                 % neededSystem

@@ -214,7 +211,7 @@ static int main_build_remote(int argc, char * * argv)

             for (auto & m : machines)
                 error
-                    % concatStringsSep<std::vector<std::string>>(", ", m.systemTypes)
+                    % concatStringsSep<StringSet>(", ", m.systemTypes)
                     % m.maxJobs
                     % concatStringsSep<StringSet>(", ", m.supportedFeatures)
                     % concatStringsSep<StringSet>(", ", m.mandatoryFeatures);

@@ -12,9 +12,9 @@ namespace nix {
     bool MY_TYPE ::operator COMPARATOR (const MY_TYPE & other) const \
     { \
         const MY_TYPE* me = this; \
-        auto fields1 = std::make_tuple<const CHILD_TYPE &, const FIELD_TYPE &>(*me->drvPath, me->FIELD); \
+        auto fields1 = std::tie(*me->drvPath, me->FIELD); \
         me = &other; \
-        auto fields2 = std::make_tuple<const CHILD_TYPE &, const FIELD_TYPE &>(*me->drvPath, me->FIELD); \
+        auto fields2 = std::tie(*me->drvPath, me->FIELD); \
         return fields1 COMPARATOR fields2; \
     }
 #define CMP(CHILD_TYPE, MY_TYPE, FIELD) \

@@ -1,4 +1,5 @@
 #include "command.hh"
+#include "markdown.hh"
 #include "store-api.hh"
 #include "local-fs-store.hh"
 #include "derivations.hh"

@@ -34,6 +35,19 @@ nlohmann::json NixMultiCommand::toJSON()
     return MultiCommand::toJSON();
 }

+void NixMultiCommand::run()
+{
+    if (!command) {
+        std::set<std::string> subCommandTextLines;
+        for (auto & [name, _] : commands)
+            subCommandTextLines.insert(fmt("- `%s`", name));
+        std::string markdownError = fmt("`nix %s` requires a sub-command. Available sub-commands:\n\n%s\n",
+            commandName, concatStringsSep("\n", subCommandTextLines));
+        throw UsageError(renderMarkdownToTerminal(markdownError));
+    }
+    command->second->run();
+}
+
 StoreCommand::StoreCommand()
 {
 }

@@ -27,9 +27,13 @@ static constexpr Command::Category catNixInstallation = 102;

 static constexpr auto installablesCategory = "Options that change the interpretation of [installables](@docroot@/command-ref/new-cli/nix.md#installables)";

-struct NixMultiCommand : virtual MultiCommand, virtual Command
+struct NixMultiCommand : MultiCommand, virtual Command
 {
     nlohmann::json toJSON() override;
+
+    using MultiCommand::MultiCommand;
+
+    virtual void run() override;
 };

 // For the overloaded run methods

@@ -9,6 +9,7 @@
 #include "store-api.hh"
 #include "command.hh"
 #include "tarball.hh"
+#include "fetch-to-store.hh"
 
 namespace nix {
 
@@ -19,7 +20,7 @@ MixEvalArgs::MixEvalArgs()
         .description = "Pass the value *expr* as the argument *name* to Nix functions.",
         .category = category,
         .labels = {"name", "expr"},
-        .handler = {[&](std::string name, std::string expr) { autoArgs[name] = 'E' + expr; }}
+        .handler = {[&](std::string name, std::string expr) { autoArgs.insert_or_assign(name, AutoArg{AutoArgExpr(expr)}); }}
     });
 
     addFlag({
@@ -27,7 +28,24 @@ MixEvalArgs::MixEvalArgs()
         .description = "Pass the string *string* as the argument *name* to Nix functions.",
         .category = category,
         .labels = {"name", "string"},
-        .handler = {[&](std::string name, std::string s) { autoArgs[name] = 'S' + s; }},
+        .handler = {[&](std::string name, std::string s) { autoArgs.insert_or_assign(name, AutoArg{AutoArgString(s)}); }},
+    });
+
+    addFlag({
+        .longName = "arg-from-file",
+        .description = "Pass the contents of file *path* as the argument *name* to Nix functions.",
+        .category = category,
+        .labels = {"name", "path"},
+        .handler = {[&](std::string name, std::string path) { autoArgs.insert_or_assign(name, AutoArg{AutoArgFile(path)}); }},
+        .completer = completePath
+    });
+
+    addFlag({
+        .longName = "arg-from-stdin",
+        .description = "Pass the contents of stdin as the argument *name* to Nix functions.",
+        .category = category,
+        .labels = {"name"},
+        .handler = {[&](std::string name) { autoArgs.insert_or_assign(name, AutoArg{AutoArgStdin{}}); }},
     });
 
     addFlag({
@@ -153,22 +171,33 @@ MixEvalArgs::MixEvalArgs()
 Bindings * MixEvalArgs::getAutoArgs(EvalState & state)
 {
     auto res = state.buildBindings(autoArgs.size());
-    for (auto & i : autoArgs) {
+    for (auto & [name, arg] : autoArgs) {
         auto v = state.allocValue();
-        if (i.second[0] == 'E')
-            state.mkThunk_(*v, state.parseExprFromString(i.second.substr(1), state.rootPath(CanonPath::fromCwd())));
-        else
-            v->mkString(((std::string_view) i.second).substr(1));
-        res.insert(state.symbols.create(i.first), v);
+        std::visit(overloaded {
+            [&](const AutoArgExpr & arg) {
+                state.mkThunk_(*v, state.parseExprFromString(arg.expr, state.rootPath(".")));
+            },
+            [&](const AutoArgString & arg) {
+                v->mkString(arg.s);
+            },
+            [&](const AutoArgFile & arg) {
+                v->mkString(readFile(arg.path));
+            },
+            [&](const AutoArgStdin & arg) {
+                v->mkString(readFile(STDIN_FILENO));
+            }
+        }, arg);
+        res.insert(state.symbols.create(name), v);
     }
     return res.finish();
 }
 
-SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir)
+SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir)
 {
     if (EvalSettings::isPseudoUrl(s)) {
-        auto storePath = fetchers::downloadTarball(
-            state.store, EvalSettings::resolvePseudoUrl(s), "source", false).storePath;
+        auto accessor = fetchers::downloadTarball(
+            EvalSettings::resolvePseudoUrl(s)).accessor;
+        auto storePath = fetchToStore(*state.store, SourcePath(accessor), FetchMode::Copy);
         return state.rootPath(CanonPath(state.store->toRealPath(storePath)));
     }
 
@@ -185,7 +214,7 @@ SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDi
     }
 
     else
-        return state.rootPath(CanonPath(s, baseDir));
+        return state.rootPath(baseDir ? absPath(s, *baseDir) : absPath(s));
 }
 
 }
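The `getAutoArgs` hunk above dispatches on the new `AutoArg` variant with `std::visit` and an `overloaded` helper that is defined elsewhere in the Nix tree. Below is a minimal, self-contained sketch of that idiom; the two-line `overloaded` definition is the conventional C++17 one and may differ in detail from Nix's, and `ArgExpr`/`ArgString` are illustrative stand-ins rather than the types from this diff.

#include <iostream>
#include <string>
#include <variant>

// Conventional 'overloaded' visitor helper: inherit the call operators of a
// set of lambdas so a single std::visit can handle every alternative.
template<class... Ts> struct overloaded : Ts... { using Ts::operator()...; };
template<class... Ts> overloaded(Ts...) -> overloaded<Ts...>;

struct ArgExpr   { std::string expr; }; // would be parsed and evaluated
struct ArgString { std::string s; };    // used verbatim

using Arg = std::variant<ArgExpr, ArgString>;

int main()
{
    Arg a = ArgExpr{"1 + 1"};
    std::visit(overloaded {
        [](const ArgExpr & e)   { std::cout << "evaluate: " << e.expr << "\n"; },
        [](const ArgString & s) { std::cout << "literal: "  << s.s    << "\n"; },
    }, a);
}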
@@ -6,6 +6,8 @@
 #include "common-args.hh"
 #include "search-path.hh"
 
+#include <filesystem>
+
 namespace nix {
 
 class Store;
@@ -26,9 +28,16 @@ struct MixEvalArgs : virtual Args, virtual MixRepair
     std::optional<std::string> evalStoreUrl;
 
 private:
-    std::map<std::string, std::string> autoArgs;
+    struct AutoArgExpr { std::string expr; };
+    struct AutoArgString { std::string s; };
+    struct AutoArgFile { std::filesystem::path path; };
+    struct AutoArgStdin { };
+
+    using AutoArg = std::variant<AutoArgExpr, AutoArgString, AutoArgFile, AutoArgStdin>;
+
+    std::map<std::string, AutoArg> autoArgs;
 };
 
-SourcePath lookupFileArg(EvalState & state, std::string_view s, CanonPath baseDir = CanonPath::fromCwd());
+SourcePath lookupFileArg(EvalState & state, std::string_view s, const Path * baseDir = nullptr);
 
 }
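The header change above makes `lookupFileArg`'s base directory a nullable `const Path *` defaulting to `nullptr`, matching the `baseDir ? absPath(s, *baseDir) : absPath(s)` fallback in the implementation hunk. The sketch below imitates only that calling convention using `std::filesystem`; `resolve()` is a stand-in, not a Nix function, and does not reproduce `absPath`'s exact semantics.

#include <filesystem>
#include <iostream>
#include <string>

namespace fs = std::filesystem;

// Resolve against *baseDir when a base directory was supplied, otherwise
// against the current working directory.
static fs::path resolve(const std::string & s, const fs::path * baseDir = nullptr)
{
    return baseDir ? fs::absolute(*baseDir / s) : fs::absolute(s);
}

int main()
{
    fs::path base = "/tmp/project";
    std::cout << resolve("default.nix").string() << "\n";        // relative to cwd
    std::cout << resolve("default.nix", &base).string() << "\n"; // relative to base
}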
@@ -1,5 +1,6 @@
 #include "editor-for.hh"
 #include "environment-variables.hh"
+#include "source-path.hh"
 
 namespace nix {
 
@@ -16,7 +17,7 @@ Strings editorFor(const SourcePath & file, uint32_t line)
         editor.find("vim") != std::string::npos ||
         editor.find("kak") != std::string::npos))
         args.push_back(fmt("+%d", line));
-    args.push_back(path->abs());
+    args.push_back(path->string());
    return args;
 }
 
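For context on the `editorFor` hunk above: the function assembles an argument vector of the shape `<editor> +<line> <file>` for editors that accept a `+line` jump argument, and the change only swaps `path->abs()` for `path->string()`. The sketch below reproduces that shape in isolation; it omits the `$EDITOR` tokenisation and editor detection, and the editor name, file path, and line number are invented.

#include <cstdint>
#include <iostream>
#include <list>
#include <string>

int main()
{
    std::string editor = "vim";
    std::string path = "/etc/nixos/configuration.nix";
    uint32_t line = 42;

    std::list<std::string> args{editor};
    // Line-aware editors ("vim", "kak", ... in the code above) take "+<line>".
    if (line > 0)
        args.push_back("+" + std::to_string(line));
    args.push_back(path);

    for (auto & a : args)
        std::cout << a << ' ';   // prints: vim +42 /etc/nixos/configuration.nix
    std::cout << "\n";
}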
@@ -2,7 +2,7 @@
 ///@file
 
 #include "types.hh"
-#include "input-accessor.hh"
+#include "source-path.hh"
 
 namespace nix {
 
@@ -58,22 +58,22 @@ DerivedPathsWithInfo InstallableAttrPath::toDerivedPaths()
 
     Bindings & autoArgs = *cmd.getAutoArgs(*state);
 
-    DrvInfos drvInfos;
-    getDerivations(*state, *v, "", autoArgs, drvInfos, false);
+    PackageInfos packageInfos;
+    getDerivations(*state, *v, "", autoArgs, packageInfos, false);
 
     // Backward compatibility hack: group results by drvPath. This
     // helps keep .all output together.
     std::map<StorePath, OutputsSpec> byDrvPath;
 
-    for (auto & drvInfo : drvInfos) {
-        auto drvPath = drvInfo.queryDrvPath();
+    for (auto & packageInfo : packageInfos) {
+        auto drvPath = packageInfo.queryDrvPath();
         if (!drvPath)
             throw Error("'%s' is not a derivation", what());
 
         auto newOutputs = std::visit(overloaded {
            [&](const ExtendedOutputsSpec::Default & d) -> OutputsSpec {
                std::set<std::string> outputsToInstall;
-               for (auto & output : drvInfo.queryOutputs(false, true))
+               for (auto & output : packageInfo.queryOutputs(false, true))
                    outputsToInstall.insert(output.first);
                return OutputsSpec::Names { std::move(outputsToInstall) };
            },
@@ -52,7 +52,7 @@ Value * InstallableFlake::getFlakeOutputs(EvalState & state, const flake::Locked
     auto aOutputs = vFlake->attrs->get(state.symbols.create("outputs"));
     assert(aOutputs);
 
-    state.forceValue(*aOutputs->value, [&]() { return aOutputs->value->determinePos(noPos); });
+    state.forceValue(*aOutputs->value, aOutputs->value->determinePos(noPos));
 
     return aOutputs->value;
 }
@@ -1,5 +1,6 @@
 #include "installable-value.hh"
 #include "eval-cache.hh"
+#include "fetch-to-store.hh"
 
 namespace nix {
 
@@ -44,7 +45,7 @@ ref<InstallableValue> InstallableValue::require(ref<Installable> installable)
 std::optional<DerivedPathWithInfo> InstallableValue::trySinglePathToDerivedPaths(Value & v, const PosIdx pos, std::string_view errorCtx)
 {
     if (v.type() == nPath) {
-        auto storePath = v.path().fetchToStore(state->store);
+        auto storePath = fetchToStore(*state->store, v.path(), FetchMode::Copy);
         return {{
             .path = DerivedPath::Opaque {
                 .path = std::move(storePath),
@@ -6,7 +6,7 @@
 
 namespace nix {
 
-struct DrvInfo;
+struct PackageInfo;
 struct SourceExprCommand;
 
 namespace eval_cache { class EvalCache; class AttrCursor; }
@@ -21,6 +21,7 @@
 #include "url.hh"
 #include "registry.hh"
 #include "build-result.hh"
+#include "fs-input-accessor.hh"
 
 #include <regex>
 #include <queue>
@@ -150,7 +151,7 @@ MixFlakeOptions::MixFlakeOptions()
         .category = category,
         .labels = {"flake-lock-path"},
         .handler = {[&](std::string lockFilePath) {
-            lockFlags.referenceLockFilePath = lockFilePath;
+            lockFlags.referenceLockFilePath = getUnfilteredRootPath(CanonPath(absPath(lockFilePath)));
         }},
         .completer = completePath
     });
@@ -325,9 +326,10 @@ void SourceExprCommand::completeInstallable(AddCompletions & completions, std::s
 
     evalSettings.pureEval = false;
     auto state = getEvalState();
-    Expr *e = state->parseExprFromFile(
-        resolveExprPath(state->checkSourcePath(lookupFileArg(*state, *file)))
-    );
+    auto e =
+        state->parseExprFromFile(
+            resolveExprPath(
+                lookupFileArg(*state, *file)));
 
     Value root;
     state->eval(e, root);
@@ -518,10 +520,10 @@ ref<eval_cache::EvalCache> openEvalCache(
     EvalState & state,
     std::shared_ptr<flake::LockedFlake> lockedFlake)
 {
-    auto fingerprint = lockedFlake->getFingerprint();
+    auto fingerprint = lockedFlake->getFingerprint(state.store);
     return make_ref<nix::eval_cache::EvalCache>(
         evalSettings.useEvalCache && evalSettings.pureEval
-            ? std::optional { std::cref(fingerprint) }
+            ? fingerprint
            : std::nullopt,
        state,
        [&state, lockedFlake]()
@@ -597,12 +599,15 @@ Installables SourceExprCommand::parseInstallables(
         state->eval(e, *vFile);
     }
     else if (file)
-        state->evalFile(lookupFileArg(*state, *file, CanonPath::fromCwd(getCommandBaseDir())), *vFile);
+        auto dir = absPath(getCommandBaseDir());
+        state->evalFile(lookupFileArg(*state, *file, &dir), *vFile);
     else if (callPackageFile) {
-        auto e = state->parseExprFromString(fmt("(import <nixpkgs> {}).callPackage %s {}", CanonPath::fromCwd(*callPackageFile)), state->rootPath(CanonPath::fromCwd()));
+        auto dir = absPath(getCommandBaseDir());
+        auto fileLoc = absPath(*callPackageFile);
+        auto e = state->parseExprFromString(fmt("(import <nixpkgs> {}).callPackage %s {}", &fileLoc), state->rootPath(&dir));
         state->eval(e, *vFile);
     } else {
-        CanonPath dir(CanonPath::fromCwd(getCommandBaseDir()));
+        Path dir = absPath(getCommandBaseDir());
         auto e = state->parseExprFromString(*expr, state->rootPath(dir));
         state->eval(e, *vFile);
     }
@@ -859,7 +864,7 @@ BuiltPaths Installable::toBuiltPaths(
     }
 }
 
-StorePathSet Installable::toStorePaths(
+StorePathSet Installable::toStorePathSet(
     ref<Store> evalStore,
     ref<Store> store,
     Realise mode, OperateOn operateOn,
@@ -873,13 +878,27 @@ StorePathSet Installable::toStorePaths(
     return outPaths;
 }
 
+StorePaths Installable::toStorePaths(
+    ref<Store> evalStore,
+    ref<Store> store,
+    Realise mode, OperateOn operateOn,
+    const Installables & installables)
+{
+    StorePaths outPaths;
+    for (auto & path : toBuiltPaths(evalStore, store, mode, operateOn, installables)) {
+        auto thisOutPaths = path.outPaths();
+        outPaths.insert(outPaths.end(), thisOutPaths.begin(), thisOutPaths.end());
+    }
+    return outPaths;
+}
+
 StorePath Installable::toStorePath(
     ref<Store> evalStore,
     ref<Store> store,
     Realise mode, OperateOn operateOn,
     ref<Installable> installable)
 {
-    auto paths = toStorePaths(evalStore, store, mode, operateOn, {installable});
+    auto paths = toStorePathSet(evalStore, store, mode, operateOn, {installable});
 
     if (paths.size() != 1)
         throw Error("argument '%s' should evaluate to one store path", installable->what());
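The last two hunks split the old `toStorePaths` into `toStorePathSet` (deduplicated, as before) and a new order-preserving `toStorePaths`. Assuming, as in libstore, that `StorePathSet` is a `std::set` and `StorePaths` a `std::vector`, the sketch below shows the behavioural difference, with plain strings standing in for store paths.

#include <iostream>
#include <set>
#include <string>
#include <vector>

int main()
{
    // Outputs in the order the installables produced them, with a repeat.
    std::vector<std::string> produced = {"zzz-foo", "aaa-bar", "zzz-foo"};

    std::set<std::string> asSet(produced.begin(), produced.end()); // toStorePathSet-like
    std::vector<std::string> asList = produced;                    // toStorePaths-like

    std::cout << "set:  ";
    for (auto & p : asSet) std::cout << p << ' ';   // aaa-bar zzz-foo
    std::cout << "\nlist: ";
    for (auto & p : asList) std::cout << p << ' ';  // zzz-foo aaa-bar zzz-foo
    std::cout << "\n";
}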
Some files were not shown because too many files have changed in this diff.