Mirror of https://github.com/privatevoid-net/nix-super.git, synced 2025-02-08 19:27:18 +02:00

Commit 8a4b98e309: Merge remote-tracking branch 'nixos/master'

36 changed files with 333 additions and 532 deletions
.github/ISSUE_TEMPLATE/feature_request.md (vendored, 2 changes)

@@ -2,7 +2,7 @@
 name: Feature request
 about: Suggest an idea for this project
 title: ''
-labels: improvement
+labels: feature
 assignees: ''

 ---
.github/ISSUE_TEMPLATE/missing_documentation.md (vendored, new file, 28 lines)

@@ -0,0 +1,28 @@
---
name: Missing or incorrect documentation
about:
title: ''
labels: 'documentation'
assignees: ''

---

## Problem

<!-- describe your problem -->

## Checklist

<!-- make sure this issue is not redundant or obsolete -->

- [ ] checked [latest Nix manual] \([source])
- [ ] checked [open documentation issues and pull requests] for possible duplicates

[latest Nix manual]: https://nixos.org/manual/nix/unstable/
[source]: https://github.com/NixOS/nix/tree/master/doc/manual/src
[open documentation issues and pull requests]: https://github.com/NixOS/nix/labels/documentation

## Proposal

<!-- propose a solution -->
.github/workflows/backport.yml (vendored, 6 changes)

@@ -2,9 +2,15 @@ name: Backport
 on:
   pull_request_target:
     types: [closed, labeled]
+permissions:
+  contents: read
 jobs:
   backport:
     name: Backport Pull Request
+    permissions:
+      # for zeebe-io/backport-action
+      contents: write
+      pull-requests: write
     if: github.repository_owner == 'NixOS' && github.event.pull_request.merged == true && (github.event_name != 'labeled' || startsWith('backport', github.event.label.name))
     runs-on: ubuntu-latest
     steps:
.github/workflows/ci.yml (vendored, 42 changes)

@@ -9,7 +9,7 @@ permissions: read-all
 jobs:

   tests:
-    needs: [check_cachix]
+    needs: [check_secrets]
     strategy:
       matrix:
         os: [ubuntu-latest, macos-latest]
@@ -22,30 +22,34 @@ jobs:
     - uses: cachix/install-nix-action@v17
     - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
     - uses: cachix/cachix-action@v10
-      if: needs.check_cachix.outputs.secret == 'true'
+      if: needs.check_secrets.outputs.cachix == 'true'
      with:
        name: '${{ env.CACHIX_NAME }}'
        signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
        authToken: '${{ secrets.CACHIX_AUTH_TOKEN }}'
     - run: nix --experimental-features 'nix-command flakes' flake check -L

-  check_cachix:
+  check_secrets:
     permissions:
       contents: none
-    name: Cachix secret present for installer tests
+    name: Check Cachix and Docker secrets present for installer tests
     runs-on: ubuntu-latest
     outputs:
-      secret: ${{ steps.secret.outputs.secret }}
+      cachix: ${{ steps.secret.outputs.cachix }}
+      docker: ${{ steps.secret.outputs.docker }}
     steps:
-      - name: Check for Cachix secret
+      - name: Check for secrets
        id: secret
        env:
          _CACHIX_SECRETS: ${{ secrets.CACHIX_SIGNING_KEY }}${{ secrets.CACHIX_AUTH_TOKEN }}
-        run: echo "::set-output name=secret::${{ env._CACHIX_SECRETS != '' }}"
+          _DOCKER_SECRETS: ${{ secrets.DOCKERHUB_USERNAME }}${{ secrets.DOCKERHUB_TOKEN }}
+        run: |
+          echo "::set-output name=cachix::${{ env._CACHIX_SECRETS != '' }}"
+          echo "::set-output name=docker::${{ env._DOCKER_SECRETS != '' }}"

   installer:
-    needs: [tests, check_cachix]
-    if: github.event_name == 'push' && needs.check_cachix.outputs.secret == 'true'
+    needs: [tests, check_secrets]
+    if: github.event_name == 'push' && needs.check_secrets.outputs.cachix == 'true'
     runs-on: ubuntu-latest
     outputs:
       installerURL: ${{ steps.prepare-installer.outputs.installerURL }}
@@ -64,8 +68,8 @@ jobs:
       run: scripts/prepare-installer-for-github-actions

   installer_test:
-    needs: [installer, check_cachix]
-    if: github.event_name == 'push' && needs.check_cachix.outputs.secret == 'true'
+    needs: [installer, check_secrets]
+    if: github.event_name == 'push' && needs.check_secrets.outputs.cachix == 'true'
     strategy:
       matrix:
         os: [ubuntu-latest, macos-latest]
@@ -77,14 +81,22 @@ jobs:
      with:
        install_url: '${{needs.installer.outputs.installerURL}}'
        install_options: "--tarball-url-prefix https://${{ env.CACHIX_NAME }}.cachix.org/serve"
-    - run: nix-instantiate -E 'builtins.currentTime' --eval
+    - run: sudo apt install fish zsh
+      if: matrix.os == 'ubuntu-latest'
+    - run: brew install fish
+      if: matrix.os == 'macos-latest'
+    - run: exec bash -c "nix-instantiate -E 'builtins.currentTime' --eval"
+    - run: exec sh -c "nix-instantiate -E 'builtins.currentTime' --eval"
+    - run: exec zsh -c "nix-instantiate -E 'builtins.currentTime' --eval"
+    - run: exec fish -c "nix-instantiate -E 'builtins.currentTime' --eval"

   docker_push_image:
-    needs: [check_cachix, tests]
+    needs: [check_secrets, tests]
     if: >-
       github.event_name == 'push' &&
       github.ref_name == 'master' &&
-      needs.check_cachix.outputs.secret == 'true'
+      needs.check_secrets.outputs.cachix == 'true' &&
+      needs.check_secrets.outputs.docker == 'true'
     runs-on: ubuntu-latest
     steps:
     - uses: actions/checkout@v3
@@ -94,7 +106,7 @@ jobs:
     - run: echo CACHIX_NAME="$(echo $GITHUB_REPOSITORY-install-tests | tr "[A-Z]/" "[a-z]-")" >> $GITHUB_ENV
     - run: echo NIX_VERSION="$(nix --experimental-features 'nix-command flakes' eval .\#default.version | tr -d \")" >> $GITHUB_ENV
     - uses: cachix/cachix-action@v10
-      if: needs.check_cachix.outputs.secret == 'true'
+      if: needs.check_secrets.outputs.cachix == 'true'
      with:
        name: '${{ env.CACHIX_NAME }}'
        signingKey: '${{ secrets.CACHIX_SIGNING_KEY }}'
.gitignore (vendored, 2 changes)

@@ -27,6 +27,8 @@ perl/Makefile.config
 # /scripts/
 /scripts/nix-profile.sh
 /scripts/nix-profile-daemon.sh
+/scripts/nix-profile.fish
+/scripts/nix-profile-daemon.fish

 # /src/libexpr/
 /src/libexpr/lexer-tab.cc
@@ -1,3 +1,35 @@
+diff --git a/darwin_stop_world.c b/darwin_stop_world.c
+index 3dbaa3fb..36a1d1f7 100644
+--- a/darwin_stop_world.c
++++ b/darwin_stop_world.c
+@@ -352,6 +352,7 @@ GC_INNER void GC_push_all_stacks(void)
+   int nthreads = 0;
+   word total_size = 0;
+   mach_msg_type_number_t listcount = (mach_msg_type_number_t)THREAD_TABLE_SZ;
++  size_t stack_limit;
+   if (!EXPECT(GC_thr_initialized, TRUE))
+     GC_thr_init();
+
+@@ -407,6 +408,19 @@ GC_INNER void GC_push_all_stacks(void)
+           GC_push_all_stack_sections(lo, hi, p->traced_stack_sect);
+         }
+         if (altstack_lo) {
++          // When a thread goes into a coroutine, we lose its original sp until
++          // control flow returns to the thread.
++          // While in the coroutine, the sp points outside the thread stack,
++          // so we can detect this and push the entire thread stack instead,
++          // as an approximation.
++          // We assume that the coroutine has similarly added its entire stack.
++          // This could be made accurate by cooperating with the application
++          // via new functions and/or callbacks.
++          stack_limit = pthread_get_stacksize_np(p->id);
++          if (altstack_lo >= altstack_hi || altstack_lo < altstack_hi - stack_limit) { // sp outside stack
++            altstack_lo = altstack_hi - stack_limit;
++          }
++
+           total_size += altstack_hi - altstack_lo;
+           GC_push_all_stack(altstack_lo, altstack_hi);
+         }
 diff --git a/pthread_stop_world.c b/pthread_stop_world.c
 index 4b2c429..1fb4c52 100644
 --- a/pthread_stop_world.c
@@ -1,4 +1,4 @@
-{ command, renderLinks ? false }:
+{ command }:

 with builtins;
 with import ./utils.nix;
@@ -21,9 +21,7 @@ let
   listCommands = cmds:
     concatStrings (map (name:
       "* "
-      + (if renderLinks
-        then "[`${command} ${name}`](./${appendName filename name}.md)"
-        else "`${command} ${name}`")
+      + "[`${command} ${name}`](./${appendName filename name}.md)"
       + " - ${cmds.${name}.description}\n")
       (attrNames cmds));
 in
@@ -50,7 +50,7 @@ $(d)/src/SUMMARY.md: $(d)/src/SUMMARY.md.in $(d)/src/command-ref/new-cli

 $(d)/src/command-ref/new-cli: $(d)/nix.json $(d)/generate-manpage.nix $(bindir)/nix
 	@rm -rf $@
-	$(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix { command = builtins.readFile $<; renderLinks = true; }'
+	$(trace-gen) $(nix-eval) --write-to $@ --expr 'import doc/manual/generate-manpage.nix { command = builtins.readFile $<; }'

 $(d)/src/command-ref/conf-file.md: $(d)/conf-file.json $(d)/generate-options.nix $(d)/src/command-ref/conf-file-prefix.md $(bindir)/nix
 	@cat doc/manual/src/command-ref/conf-file-prefix.md > $@.tmp
@@ -96,7 +96,7 @@ doc/manual/generated/man1/nix3-manpages: $(d)/src/command-ref/new-cli
 	  if [[ $$name = SUMMARY ]]; then continue; fi; \
 	  printf "Title: %s\n\n" "$$name" > $$tmpFile; \
 	  cat $$i >> $$tmpFile; \
-	  lowdown -sT man -M section=1 $$tmpFile -o $(DESTDIR)$$(dirname $@)/$$name.1; \
+	  lowdown -sT man --nroff-nolinks -M section=1 $$tmpFile -o $(DESTDIR)$$(dirname $@)/$$name.1; \
 	  rm $$tmpFile; \
 	done
 	@touch $@
@@ -59,14 +59,6 @@
   @manpages@
 - [Files](command-ref/files.md)
   - [nix.conf](command-ref/conf-file.md)
-<!--
-- [Architecture](architecture/architecture.md)
-  - [Store](architecture/store/store.md)
-    - [Closure](architecture/store/store/closure.md)
-    - [Build system terminology](architecture/store/store/build-system-terminology.md)
-  - [Store Path](architecture/store/path.md)
-  - [File System Object](architecture/store/fso.md)
--->
 - [Glossary](glossary.md)
 - [Contributing](contributing/contributing.md)
   - [Hacking](contributing/hacking.md)
@@ -1,79 +0,0 @@
# Architecture

*(This chapter is unstable and a work in progress. Incoming links may rot.)*

This chapter describes how Nix works.
It should help users understand why Nix behaves as it does, and it should help developers understand how to modify Nix and how to write similar tools.

## Overview

Nix consists of [hierarchical layers][layer-architecture].

```
+-----------------------------------------------------------------+
| Nix                                                             |
|                  [ command line interface ]------,              |
|                              |                   |              |
|                          evaluates               |              |
|                              |                manages           |
|                              V                   |              |
|                  [ configuration language ]      |              |
|                              |                   |              |
| +----------------------------|-------------------V------------+ |
| | store                 evaluates to                          | |
| |                            |                                | |
| |            referenced by   V       builds                   | |
| |  [ build input ] ---> [ build plan ] ---> [ build result ]  | |
| |                                                             | |
| +-------------------------------------------------------------+ |
+-----------------------------------------------------------------+
```

At the top is the [command line interface](../command-ref/command-ref.md), translating from invocations of Nix executables to interactions with the underlying layers.

Below that is the [Nix expression language](../expressions/expression-language.md), a [purely functional][purely-functional-programming] configuration language.
It is used to compose expressions which ultimately evaluate to self-contained *build plans*, used to derive *build results* from referenced *build inputs*.

The command line and Nix language are what users interact with most.

> **Note**
> The Nix language itself does not have a notion of *packages* or *configurations*.
> As far as we are concerned here, the inputs and results of a build plan are just data.

Underlying these is the [Nix store](./store/store.md), a mechanism to keep track of build plans, data, and references between them.
It can also execute build plans to produce new data.

A build plan is a series of *build tasks*.
Each build task has a special build input which is used as *build instructions*.
The result of a build task can be input to another build task.

```
+------------------------------------------------------------------------------------------+
| store                                                                                     |
|                  .................................................                        |
|                  :  build plan                                   :                        |
|                  :                                               :                        |
|  [ build input ]-----instructions-,                              :                        |
|                  :                |                              :                        |
|                  :                v                              :                        |
|  [ build input ]----------->[ build task ]--instructions-,       :                        |
|                  :                                       |       :                        |
|                  :                                       |       :                        |
|                  :                                       v       :                        |
|                  :                                  [ build task ]----->[ build result ]  |
|  [ build input ]-----instructions-,                      ^       :                        |
|                  :                |                      |       :                        |
|                  :                v                      |       :                        |
|  [ build input ]----------->[ build task ]---------------'       :                        |
|                  :                ^                              :                        |
|                  :                |                              :                        |
|  [ build input ]------------------'                              :                        |
|                  :                                               :                        |
|                  :                                               :                        |
|                  :...............................................:                        |
|                                                                                           |
+------------------------------------------------------------------------------------------+
```

[layer-architecture]: https://en.m.wikipedia.org/wiki/Multitier_architecture#Layers
[purely-functional-programming]: https://en.m.wikipedia.org/wiki/Purely_functional_programming
@@ -1,69 +0,0 @@
# File System Object

The Nix store uses a simple file system model for the data it holds in [store objects](store.md#store-object).

Every file system object is one of the following:

- File: an executable flag, and arbitrary data for contents
- Directory: mapping of names to child file system objects
- [Symbolic link][symlink]: may point anywhere.

We call a store object's outermost file system object the *root*.

    data FileSystemObject
      = File { isExecutable :: Bool, contents :: Bytes }
      | Directory { entries :: Map FileName FileSystemObject }
      | SymLink { target :: Path }

Examples:

- a directory with contents

      /nix/store/<hash>-hello-2.10
      ├── bin
      │   └── hello
      └── share
          ├── info
          │   └── hello.info
          └── man
              └── man1
                  └── hello.1.gz

- a directory with relative symlink and other contents

      /nix/store/<hash>-go-1.16.9
      ├── bin -> share/go/bin
      ├── nix-support/
      └── share/

- a directory with absolute symlink

      /nix/store/d3k...-nodejs
      └── nix_node -> /nix/store/f20...-nodejs-10.24.

A bare file or symlink can be a root file system object.
Examples:

    /nix/store/<hash>-hello-2.10.tar.gz

    /nix/store/4j5...-pkg-config-wrapper-0.29.2-doc -> /nix/store/i99...-pkg-config-0.29.2-doc

Symlinks pointing outside of their own root or to a store object without a matching reference are allowed, but might not function as intended.
Examples:

- an arbitrarily symlinked file may change or not exist at all

      /nix/store/<hash>-foo
      └── foo -> /home/foo

- if a symlink to a store path was not automatically created by Nix, it may be invalid or get invalidated when the store object is deleted

      /nix/store/<hash>-bar
      └── bar -> /nix/store/abc...-foo

Nix file system objects do not support [hard links][hardlink]:
each file system object which is not the root has exactly one parent and one name.
However, as store objects are immutable, an underlying file system can use hard links for optimization.

[symlink]: https://en.m.wikipedia.org/wiki/Symbolic_link
[hardlink]: https://en.m.wikipedia.org/wiki/Hard_link
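The data definition in the removed page above is written in Haskell-style pseudocode. Purely as an illustrative aside, and not code from this commit or from Nix itself, the same model can be sketched in C++; all type and field names below are hypothetical transcriptions of the pseudocode:

```cpp
#include <map>
#include <memory>
#include <string>
#include <variant>

// Illustrative sketch only: a direct transcription of the pseudocode above.
struct FileSystemObject;

struct File {
    bool isExecutable;          // the executable flag
    std::string contents;       // arbitrary bytes
};

struct SymLink {
    std::string target;         // may point anywhere
};

struct Directory {
    // Mapping of names to child file system objects. unique_ptr reflects the
    // "no hard links" rule: every non-root object has exactly one parent.
    std::map<std::string, std::unique_ptr<FileSystemObject>> entries;
};

struct FileSystemObject {
    std::variant<File, Directory, SymLink> value;
};
```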
@@ -1,105 +0,0 @@
# Store Path

Nix implements [references](store.md#reference) to [store objects](store.md#store-object) as *store paths*.

Store paths are pairs of

- a 20-byte [digest](#digest) for identification
- a symbolic name for people to read.

Example:

- digest: `b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z`
- name: `firefox-33.1`

It is rendered to a file system path as the concatenation of

- [store directory](#store-directory)
- path-separator (`/`)
- [digest](#digest) rendered in a custom variant of [base-32](https://en.m.wikipedia.org/wiki/Base32) (20 arbitrary bytes become 32 ASCII characters)
- hyphen (`-`)
- name

Example:

    /nix/store/b6gvzjyb2pg0kjfwrjmg1vfhh54ad73z-firefox-33.1
    |--------| |------------------------------| |----------|
    store directory            digest                name

## Store Directory

Every [store](./store.md) has a store directory.

If the store has a [file system representation](./store.md#files-and-processes), this directory contains the store’s [file system objects](#file-system-object), which can be addressed by [store paths](#store-path).

This means a store path is not just derived from the referenced store object itself, but depends on the store the store object is in.

> **Note**
> The store directory defaults to `/nix/store`, but is in principle arbitrary.

It is important which store a given store object belongs to:
Files in the store object can contain store paths, and processes may read these paths.
Nix can only guarantee [referential integrity](store/closure.md) if store paths do not cross store boundaries.

Therefore one can only copy store objects to a different store if

- the source and target stores' directories match

or

- the store object in question has no references, that is, contains no store paths.

One cannot copy a store object to a store with a different store directory.
Instead, it has to be rebuilt, together with all its dependencies.
It is in general not enough to replace the store directory string in file contents, as this may render executables unusable by invalidating their internal offsets or checksums.

# Digest

In a [store path](#store-path), the [digest][digest] is the output of a [cryptographic hash function][hash] of either all *inputs* involved in building the referenced store object or its actual *contents*.

Store objects are therefore said to be either [input-addressed](#input-addressing) or [content-addressed](#content-addressing).

> **Historical Note**
> The 20 byte restriction is because originally digests were [SHA-1][sha-1] hashes.
> Nix now uses [SHA-256][sha-256], and longer hashes are still reduced to 20 bytes for compatibility.

[digest]: https://en.m.wiktionary.org/wiki/digest#Noun
[hash]: https://en.m.wikipedia.org/wiki/Cryptographic_hash_function
[sha-1]: https://en.m.wikipedia.org/wiki/SHA-1
[sha-256]: https://en.m.wikipedia.org/wiki/SHA-256

### Reference scanning

When a new store object is built, Nix scans its file contents for store paths to construct its set of references.

The special format of a store path's [digest](#digest) allows reliably detecting it among arbitrary data.
Nix uses the [closure](store.md#closure) of build inputs to derive the list of allowed store paths, to avoid false positives.

This way, scanning files captures run time dependencies without the user having to declare them explicitly.
Doing it at build time and persisting references in the store object avoids repeating this time-consuming operation.

> **Note**
> In practice, it is sometimes still necessary for users to declare certain dependencies explicitly, if they are to be preserved in the build result's closure.
> This depends on the specifics of the software to build and run.
>
> For example, Java programs are compressed after compilation, which obfuscates any store paths they may refer to and prevents Nix from automatically detecting them.

## Input Addressing

Input addressing means that the digest derives from how the store object was produced, namely its build inputs and build plan.

To compute the hash of a store object one needs a deterministic serialisation, i.e., a binary string representation which only changes if the store object changes.

Nix has a custom serialisation format called Nix Archive (NAR).

Store object references of this sort can *not* be validated from the content of the store object.
Rather, a cryptographic signature has to be used to indicate that someone is vouching for the store object really being produced from a build plan with that digest.

## Content Addressing

Content addressing means that the digest derives from the store object's contents, namely its file system objects and references.
If one knows content addressing was used, one can recalculate the reference and thus verify the store object.

Content addressing is currently only used for the special cases of source files and "fixed-output derivations", where the contents of a store object are known in advance.
Content addressing of build results is still an [experimental feature subject to some restrictions](https://github.com/tweag/rfcs/blob/cas-rfc/rfcs/0062-content-addressed-paths.md).
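The "Reference scanning" section of the removed page describes detecting known store path digests in a build output's bytes. As a purely illustrative sketch, not this commit's code and not Nix's actual scanner, a naive version of the idea looks like the following; the function and parameter names are invented for the example:

```cpp
#include <set>
#include <string>
#include <string_view>

// Illustrative sketch: return the subset of candidate digests (the base-32
// hash parts of the allowed store paths, i.e. the closure of the build
// inputs) that literally occur in `contents`. Restricting the candidates to
// that closure is what avoids false positives, as the text above explains.
// A real scanner would stream the file instead of holding it all in memory.
std::set<std::string> scanForReferences(
    std::string_view contents,
    const std::set<std::string> & candidateDigests)
{
    std::set<std::string> found;
    for (const auto & digest : candidateDigests) {
        if (contents.find(digest) != std::string_view::npos)
            found.insert(digest);
    }
    return found;
}
```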
@@ -1,151 +0,0 @@
# Store

A Nix store is a collection of *store objects* with references between them.
It supports operations to manipulate that collection.

The following concept map is a graphical outline of this chapter.
Arrows indicate suggested reading order.

```
                ,--------------[ store ]----------------,
                |                  |                    |
                v                  v                    v
        [ store object ]       [ closure ]--,      [ operations ]
                |                  |  |     |        |       |
                v                  |  |     v        v       |
  [ files and processes ]          |  |  [ garbage collection ]
         /          \              |  |                      |
        v            v             |  v                      v
[ file system object ] [ store path ]    [ derivation ]--->[ building ]
                            |     ^   |        |                |
                            v     |   v        v                |
                        [ digest ]----' [ reference scanning ]<-'
                            /  \
                           v    v
          [ input addressing ] [ content addressing ]
```

## Store Object

A store object can hold

- arbitrary *data*
- *references* to other store objects.

Store objects can be build inputs, build results, or build tasks.

Store objects are [immutable][immutable-object]: once created, they do not change until they are deleted.

## Reference

A store object reference is an [opaque][opaque-data-type], [unique identifier][unique-identifier]:
The only way to obtain references is by adding or building store objects.
A reference will always point to exactly one store object.

## Operations

A Nix store can *add*, *retrieve*, and *delete* store objects.

    [ data ]
        |
        V
    [ store ] ---> add ----> [ store' ]
                                 |
                                 V
                            [ reference ]

<!-- -->

    [ reference ]
         |
         V
    [ store ] ---> get
         |
         V
    [ store object ]

<!-- -->

    [ reference ]
         |
         V
    [ store ] --> delete --> [ store' ]

It can *perform builds*, that is, create new store objects by transforming build inputs into build outputs, using instructions from the build tasks.

    [ reference ]
         |
         V
    [ store ] --> build --(maybe)--> [ store' ]
                    |
                    V
              [ reference ]

As it keeps track of references, it can [garbage-collect][garbage-collection] unused store objects.

    [ store ] --> collect garbage --> [ store' ]

## Files and Processes

Nix maps between its store model and the [Unix paradigm][unix-paradigm] of [files and processes][file-descriptor], by encoding immutable store objects and opaque identifiers as file system primitives: files and directories, and paths.
That allows processes to resolve references contained in files and thus access the contents of store objects.

Store objects are therefore implemented as the pair of

- a [file system object](fso.md) for data
- a set of [store paths](path.md) for references.

[unix-paradigm]: https://en.m.wikipedia.org/wiki/Everything_is_a_file
[file-descriptor]: https://en.m.wikipedia.org/wiki/File_descriptor

The following diagram shows a radical simplification of how Nix interacts with the operating system:
It uses files as build inputs, and build outputs are files again.
On the operating system, files can be run as processes, which in turn operate on files.
A build function also amounts to an operating system process (not depicted).

```
+-----------------------------------------------------------------+
| Nix                                                             |
|                  [ command line interface ]------,              |
|                              |                   |              |
|                          evaluates               |              |
|                              |                manages           |
|                              V                   |              |
|                  [ configuration language ]      |              |
|                              |                   |              |
| +----------------------------|-------------------V------------+ |
| | store                 evaluates to                          | |
| |                            |                                | |
| |            referenced by   V       builds                   | |
| |  [ build input ] ---> [ build plan ] ---> [ build result ]  | |
| |         ^                                       |           | |
| +---------|---------------------------------------|-----------+ |
+-----------|---------------------------------------|-------------+
            |                                       |
    file system object                          store path
            |                                       |
+-----------|---------------------------------------|-------------+
| operating system       +------------+             |             |
|           '----------->|            |<------------'             |
|                        |    file    |                           |
|                   ,----|            |<---,                      |
|                   |    +------------+    |                      |
|        execute as |                      | read, write, execute |
|                   |    +------------+    |                      |
|                   '--->|  process   |----'                      |
|                        +------------+                           |
+-----------------------------------------------------------------+
```

There exist different types of stores, which all follow this model.
Examples:
- store on the local file system
- remote store accessible via SSH
- binary cache store accessible via HTTP

To make store objects accessible to processes, stores ultimately have to expose store objects through the file system.
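The removed page above describes a store abstractly in terms of add, get, delete, and build operations. As an illustrative aside only, that abstract model could be sketched as a C++ interface; the class and method names are invented for the example and are not Nix's actual API:

```cpp
#include <optional>
#include <set>
#include <string>
#include <vector>

// Illustrative sketch of the abstract store model described above.
using Reference = std::string;          // opaque, unique identifier

struct StoreObject {
    std::vector<unsigned char> data;    // arbitrary data
    std::set<Reference> references;     // references to other store objects
};

class AbstractStore {
public:
    virtual ~AbstractStore() = default;

    // add: store a new object, returning the reference that points to it.
    virtual Reference add(StoreObject object) = 0;

    // get: retrieve the object a reference points to, if present.
    virtual std::optional<StoreObject> get(const Reference & ref) = 0;

    // delete: remove an object; only safe if nothing in use still reaches it.
    // ("delete" is a C++ keyword, hence the name "remove" here.)
    virtual void remove(const Reference & ref) = 0;

    // build: run a build task (itself a store object) and return the
    // references of the build results; may fail, hence the optional
    // ("--(maybe)-->" in the diagram above).
    virtual std::optional<std::vector<Reference>> build(const Reference & task) = 0;
};
```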
@@ -1,32 +0,0 @@
# A [Rosetta stone][rosetta-stone] for build system terminology

The Nix store's design is comparable to other build systems.
Usage of terms is, for historic reasons, not entirely consistent within the Nix ecosystem, and still subject to slow change.

The following translation table points out similarities and equivalent terms, to help clarify their meaning and inform consistent use in the future.

| generic build system             | Nix              | [Bazel][bazel]                                                        | [Build Systems à la Carte][bsalc] | programming language     |
| -------------------------------- | ---------------- | --------------------------------------------------------------------- | --------------------------------- | ------------------------ |
| data (build input, build result) | store object     | [artifact][bazel-artifact]                                             | value                             | value                    |
| build instructions               | builder          | ([depends on action type][bazel-actions])                              | function                          | function                 |
| build task                       | derivation       | [action][bazel-action]                                                 | `Task`                            | [thunk][thunk]           |
| build plan                       | derivation graph | [action graph][bazel-action-graph], [build graph][bazel-build-graph]   | `Tasks`                           | [call graph][call-graph] |
| build                            | build            | build                                                                  | application of `Build`            | evaluation               |
| persistence layer                | store            | [action cache][bazel-action-cache]                                     | `Store`                           | heap                     |

All of these systems share features of [declarative programming][declarative-programming] languages, a key insight first put forward by Eelco Dolstra et al. in [Imposing a Memory Management Discipline on Software Deployment][immdsd] (2004), elaborated in his PhD thesis [The Purely Functional Software Deployment Model][phd-thesis] (2006), and further refined by Andrey Mokhov et al. in [Build Systems à la Carte][bsalc] (2018).

[rosetta-stone]: https://en.m.wikipedia.org/wiki/Rosetta_Stone
[bazel]: https://bazel.build/start/bazel-intro
[bazel-artifact]: https://bazel.build/reference/glossary#artifact
[bazel-actions]: https://docs.bazel.build/versions/main/skylark/lib/actions.html
[bazel-action]: https://bazel.build/reference/glossary#action
[bazel-action-graph]: https://bazel.build/reference/glossary#action-graph
[bazel-build-graph]: https://bazel.build/reference/glossary#build-graph
[bazel-action-cache]: https://bazel.build/reference/glossary#action-cache
[thunk]: https://en.m.wikipedia.org/wiki/Thunk
[call-graph]: https://en.m.wikipedia.org/wiki/Call_graph
[declarative-programming]: https://en.m.wikipedia.org/wiki/Declarative_programming
[immdsd]: https://edolstra.github.io/pubs/immdsd-icse2004-final.pdf
[phd-thesis]: https://edolstra.github.io/pubs/phd-thesis.pdf
[bsalc]: https://www.microsoft.com/en-us/research/uploads/prod/2018/03/build-systems.pdf
@@ -1,29 +0,0 @@
# Closure

Nix stores ensure [referential integrity][referential-integrity]: for each store object in the store, all the store objects it references must also be in the store.

The set of all store objects reachable by following references from a given initial set of store objects is called a *closure*.

Adding, building, copying and deleting store objects must be done in a way that preserves referential integrity:

- A newly added store object cannot have references, unless it is a build task.

- Build results must only refer to store objects in the closure of the build inputs.

  Building a store object will add appropriate references, according to the build task.

- Store objects being copied must refer to objects already in the destination store.

  Recursive copying must either proceed in dependency order or be atomic.

- We can only safely delete store objects which are not reachable from any reference still in use.

<!-- more details in section on garbage collection, link to it once it exists -->

[referential-integrity]: https://en.m.wikipedia.org/wiki/Referential_integrity
[garbage-collection]: https://en.m.wikipedia.org/wiki/Garbage_collection_(computer_science)
[immutable-object]: https://en.m.wikipedia.org/wiki/Immutable_object
[opaque-data-type]: https://en.m.wikipedia.org/wiki/Opaque_data_type
[unique-identifier]: https://en.m.wikipedia.org/wiki/Unique_identifier
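The removed page defines a closure as everything reachable by following references from an initial set of store objects. As an illustration only, not code from this commit or from Nix, the usual worklist traversal over a map of direct references can be sketched like this; all names are hypothetical:

```cpp
#include <map>
#include <set>
#include <string>
#include <vector>

using Reference = std::string;

// Illustrative sketch: compute the closure of `roots`, given each object's
// set of direct references, by a plain worklist (depth-first) traversal.
std::set<Reference> computeClosure(
    const std::set<Reference> & roots,
    const std::map<Reference, std::set<Reference>> & referencesOf)
{
    std::set<Reference> closure(roots.begin(), roots.end());
    std::vector<Reference> queue(roots.begin(), roots.end());

    while (!queue.empty()) {
        Reference current = queue.back();
        queue.pop_back();
        auto it = referencesOf.find(current);
        if (it == referencesOf.end()) continue;
        for (const auto & ref : it->second) {
            // Referential integrity means every reference found here must
            // itself denote an object present in the store.
            if (closure.insert(ref).second)
                queue.push_back(ref);
        }
    }
    return closure;
}
```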
@@ -1,2 +1,7 @@
 # Release X.Y (202?-??-??)

+* `<nix/fetchurl.nix>` now accepts an additional argument `impure` which
+  defaults to `false`. If it is set to `true`, the `hash` and `sha256`
+  arguments will be ignored and the resulting derivation will have
+  `__impure` set to `true`, making it an impure derivation.
@@ -260,6 +260,7 @@
           echo "file binary-dist $fn" >> $out/nix-support/hydra-build-products
           tar cvfJ $fn \
             --owner=0 --group=0 --mode=u+rw,uga+r \
+            --mtime='1970-01-01' \
             --absolute-names \
             --hard-dereference \
             --transform "s,$TMPDIR/install,$dir/install," \
@@ -10,14 +10,15 @@ function _nix() {
   local -a suggestions
   declare -a suggestions
   for suggestion in ${res:1}; do
-    # FIXME: This doesn't work properly if the suggestion word contains a `:`
-    # itself
-    suggestions+="${suggestion/ /:}"
+    suggestions+=("${suggestion%% *}")
   done
+  local -a args
   if [[ "$tpe" == filenames ]]; then
-    compadd -f
+    args+=('-f')
+  elif [[ "$tpe" == attrs ]]; then
+    args+=('-S' '')
   fi
-  _describe 'nix' suggestions
+  compadd -J nix "${args[@]}" -a suggestions
 }

 _nix "$@"
@@ -37,6 +37,19 @@ readonly PROFILE_TARGETS=("/etc/bashrc" "/etc/profile.d/nix.sh" "/etc/zshrc" "/e
 readonly PROFILE_BACKUP_SUFFIX=".backup-before-nix"
 readonly PROFILE_NIX_FILE="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.sh"

+# Fish has different syntax than zsh/bash, treat it separate
+readonly PROFILE_FISH_SUFFIX="conf.d/nix.fish"
+readonly PROFILE_FISH_PREFIXES=(
+    # each of these are common values of $__fish_sysconf_dir,
+    # under which Fish will look for a file named
+    # $PROFILE_FISH_SUFFIX.
+    "/etc/fish"              # standard
+    "/usr/local/etc/fish"    # their installer .pkg for macOS
+    "/opt/homebrew/etc/fish" # homebrew
+    "/opt/local/etc/fish"    # macports
+)
+readonly PROFILE_NIX_FILE_FISH="$NIX_ROOT/var/nix/profiles/default/etc/profile.d/nix-daemon.fish"
+
 readonly NIX_INSTALLED_NIX="@nix@"
 readonly NIX_INSTALLED_CACERT="@cacert@"
 #readonly NIX_INSTALLED_NIX="/nix/store/j8dbv5w6jl34caywh2ygdy88knx1mdf7-nix-2.3.6"
@@ -362,7 +375,7 @@ finish_fail() {
     finish_cleanup

     failure <<EOF
-Jeeze, something went wrong. If you can take all the output and open
+Oh no, something went wrong. If you can take all the output and open
 an issue, we'd love to fix the problem so nobody else has this issue.

 :(
@@ -810,7 +823,7 @@ EOF
     fi

     _sudo "to load data for the first time in to the Nix Database" \
-          "$NIX_INSTALLED_NIX/bin/nix-store" --load-db < ./.reginfo
+          HOME="$ROOT_HOME" "$NIX_INSTALLED_NIX/bin/nix-store" --load-db < ./.reginfo

     echo "      Just finished getting the nix database ready."
     )
@@ -828,6 +841,19 @@ fi
 EOF
 }

+# Fish has differing syntax
+fish_source_lines() {
+    cat <<EOF
+
+# Nix
+if test -e '$PROFILE_NIX_FILE_FISH'
+  . '$PROFILE_NIX_FILE_FISH'
+end
+# End Nix
+
+EOF
+}
+
 configure_shell_profile() {
     task "Setting up shell profiles: ${PROFILE_TARGETS[*]}"
     for profile_target in "${PROFILE_TARGETS[@]}"; do
@@ -849,6 +875,27 @@ configure_shell_profile() {
               tee -a "$profile_target"
         fi
     done
+
+    task "Setting up shell profiles for Fish with with ${PROFILE_FISH_SUFFIX} inside ${PROFILE_FISH_PREFIXES[*]}"
+    for fish_prefix in "${PROFILE_FISH_PREFIXES[@]}"; do
+        if [ ! -d "$fish_prefix" ]; then
+            # this specific prefix (ie: /etc/fish) is very likely to exist
+            # if Fish is installed with this sysconfdir.
+            continue
+        fi
+
+        profile_target="${fish_prefix}/${PROFILE_FISH_SUFFIX}"
+        conf_dir=$(dirname "$profile_target")
+        if [ ! -d "$conf_dir" ]; then
+            _sudo "create $conf_dir for our Fish hook" \
+                mkdir "$conf_dir"
+        fi
+
+        fish_source_lines \
+            | _sudo "write nix-daemon settings to $profile_target" \
+                  tee "$profile_target"
+    done
+
     # TODO: should we suggest '. $PROFILE_NIX_FILE'? It would get them on
     # their way less disruptively, but a counter-argument is that they won't
     # immediately notice if something didn't get set up right?
@@ -209,31 +209,50 @@ if [ -z "$NIX_INSTALLER_NO_CHANNEL_ADD" ]; then
 fi

 added=
-p=$HOME/.nix-profile/etc/profile.d/nix.sh
+p=
+p_sh=$HOME/.nix-profile/etc/profile.d/nix.sh
+p_fish=$HOME/.nix-profile/etc/profile.d/nix.fish
 if [ -z "$NIX_INSTALLER_NO_MODIFY_PROFILE" ]; then
     # Make the shell source nix.sh during login.
     for i in .bash_profile .bash_login .profile; do
         fn="$HOME/$i"
         if [ -w "$fn" ]; then
-            if ! grep -q "$p" "$fn"; then
+            if ! grep -q "$p_sh" "$fn"; then
                 echo "modifying $fn..." >&2
-                printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p" "$p" >> "$fn"
+                printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p_sh" "$p_sh" >> "$fn"
             fi
             added=1
+            p=${p_sh}
             break
         fi
     done
     for i in .zshenv .zshrc; do
         fn="$HOME/$i"
         if [ -w "$fn" ]; then
-            if ! grep -q "$p" "$fn"; then
+            if ! grep -q "$p_sh" "$fn"; then
                 echo "modifying $fn..." >&2
-                printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p" "$p" >> "$fn"
+                printf '\nif [ -e %s ]; then . %s; fi # added by Nix installer\n' "$p_sh" "$p_sh" >> "$fn"
             fi
             added=1
+            p=${p_sh}
             break
         fi
     done

+    if [ -d "$HOME/.config/fish" ]; then
+        fishdir=$HOME/.config/fish/conf.d
+        if [ ! -d "$fishdir" ]; then
+            mkdir -p "$fishdir"
+        fi
+
+        fn="$fishdir/nix.fish"
+        echo "placing $fn..." >&2
+        printf '\nif test -e %s; . %s; end # added by Nix installer\n' "$p_fish" "$p_fish" > "$fn"
+        added=1
+        p=${p_fish}
+    fi
+else
+    p=${p_sh}
 fi

 if [ -z "$added" ]; then
@@ -6,6 +6,8 @@ noinst-scripts += $(nix_noinst_scripts)
 profiledir = $(sysconfdir)/profile.d

 $(eval $(call install-file-as, $(d)/nix-profile.sh, $(profiledir)/nix.sh, 0644))
+$(eval $(call install-file-as, $(d)/nix-profile.fish, $(profiledir)/nix.fish, 0644))
 $(eval $(call install-file-as, $(d)/nix-profile-daemon.sh, $(profiledir)/nix-daemon.sh, 0644))
+$(eval $(call install-file-as, $(d)/nix-profile-daemon.fish, $(profiledir)/nix-daemon.fish, 0644))

 clean-files += $(nix_noinst_scripts)
scripts/nix-profile-daemon.fish.in (new file, 35 lines)

@@ -0,0 +1,35 @@
# Only execute this file once per shell.
if test -n "$__ETC_PROFILE_NIX_SOURCED"
  return
end

set __ETC_PROFILE_NIX_SOURCED 1

set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile"

# Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
if test -n "$NIX_SSH_CERT_FILE"
  : # Allow users to override the NIX_SSL_CERT_FILE
else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch
  set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt
else if test -e /etc/ssl/ca-bundle.pem # openSUSE Tumbleweed
  set --export NIX_SSL_CERT_FILE /etc/ssl/ca-bundle.pem
else if test -e /etc/ssl/certs/ca-bundle.crt # Old NixOS
  set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-bundle.crt
else if test -e /etc/pki/tls/certs/ca-bundle.crt # Fedora, CentOS
  set --export NIX_SSL_CERT_FILE /etc/pki/tls/certs/ca-bundle.crt
else if test -e "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" # fall back to cacert in Nix profile
  set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ssl/certs/ca-bundle.crt"
else if test -e "$NIX_LINK/etc/ca-bundle.crt" # old cacert in Nix profile
  set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ca-bundle.crt"
else
  # Fall back to what is in the nix profiles, favouring whatever is defined last.
  for i in $NIX_PROFILES
    if test -e "$i/etc/ssl/certs/ca-bundle.crt"
      set --export NIX_SSL_CERT_FILE "$i/etc/ssl/certs/ca-bundle.crt"
    end
  end
end

fish_add_path --prepend --global "@localstatedir@/nix/profiles/default/bin"
fish_add_path --prepend --global "$HOME/.nix-profile/bin"
scripts/nix-profile.fish.in (new file, 35 lines)

@@ -0,0 +1,35 @@
if test -n "$HOME" && test -n "$USER"

  # Set up the per-user profile.

  set NIX_LINK $HOME/.nix-profile

  # Set up environment.
  # This part should be kept in sync with nixpkgs:nixos/modules/programs/environment.nix
  set --export NIX_PROFILES "@localstatedir@/nix/profiles/default $HOME/.nix-profile"

  # Set $NIX_SSL_CERT_FILE so that Nixpkgs applications like curl work.
  if test -n "$NIX_SSH_CERT_FILE"
    : # Allow users to override the NIX_SSL_CERT_FILE
  else if test -e /etc/ssl/certs/ca-certificates.crt # NixOS, Ubuntu, Debian, Gentoo, Arch
    set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-certificates.crt
  else if test -e /etc/ssl/ca-bundle.pem # openSUSE Tumbleweed
    set --export NIX_SSL_CERT_FILE /etc/ssl/ca-bundle.pem
  else if test -e /etc/ssl/certs/ca-bundle.crt # Old NixOS
    set --export NIX_SSL_CERT_FILE /etc/ssl/certs/ca-bundle.crt
  else if test -e /etc/pki/tls/certs/ca-bundle.crt # Fedora, CentOS
    set --export NIX_SSL_CERT_FILE /etc/pki/tls/certs/ca-bundle.crt
  else if test -e "$NIX_LINK/etc/ssl/certs/ca-bundle.crt" # fall back to cacert in Nix profile
    set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ssl/certs/ca-bundle.crt"
  else if test -e "$NIX_LINK/etc/ca-bundle.crt" # old cacert in Nix profile
    set --export NIX_SSL_CERT_FILE "$NIX_LINK/etc/ca-bundle.crt"
  end

  # Only use MANPATH if it is already set. In general `man` will just simply
  # pick up `.nix-profile/share/man` because is it close to `.nix-profile/bin`
  # which is in the $PATH. For more info, run `manpath -d`.
  set --export --prepend --path MANPATH "$NIX_LINK/share/man"

  fish_add_path --prepend --global "$NIX_LINK/bin"
  set --erase NIX_LINK
end
@@ -1,7 +1,6 @@
 if [ -n "$HOME" ] && [ -n "$USER" ]; then

     # Set up the per-user profile.
-    # This part should be kept in sync with nixpkgs:nixos/modules/programs/shell.nix

     NIX_LINK=$HOME/.nix-profile

@@ -18,7 +18,7 @@ std::string renderMarkdownToTerminal(std::string_view markdown)
         .hmargin = 0,
         .vmargin = 0,
         .feat = LOWDOWN_COMMONMARK | LOWDOWN_FENCED | LOWDOWN_DEFLIST | LOWDOWN_TABLES,
-        .oflags = 0,
+        .oflags = LOWDOWN_TERM_NOLINK,
     };

     auto doc = lowdown_doc_new(&opts);
@@ -12,13 +12,13 @@
 , executable ? false
 , unpack ? false
 , name ? baseNameOf (toString url)
+, impure ? false
 }:

-derivation {
+derivation ({
   builder = "builtin:fetchurl";

   # New-style output content requirements.
-  inherit outputHashAlgo outputHash;
   outputHashMode = if unpack || executable then "recursive" else "flat";

   inherit name url executable unpack;
@@ -38,4 +38,6 @@ derivation {

   # To make "nix-prefetch-url" work.
   urls = [ url ];
-}
+} // (if impure
+  then { __impure = true; }
+  else { inherit outputHashAlgo outputHash; }))
@@ -4,6 +4,7 @@
 #include "gc-store.hh"
 #include "util.hh"
 #include "loggers.hh"
+#include "progress-bar.hh"

 #include <algorithm>
 #include <cctype>
@@ -181,8 +182,9 @@ void initNix()
     /* Reset SIGCHLD to its default. */
     struct sigaction act;
     sigemptyset(&act.sa_mask);
-    act.sa_handler = SIG_DFL;
     act.sa_flags = 0;

+    act.sa_handler = SIG_DFL;
     if (sigaction(SIGCHLD, &act, 0))
         throw SysError("resetting SIGCHLD");
@@ -194,9 +196,20 @@ void initNix()
     /* HACK: on darwin, we need can’t use sigprocmask with SIGWINCH.
      * Instead, add a dummy sigaction handler, and signalHandlerThread
      * can handle the rest. */
-    struct sigaction sa;
-    sa.sa_handler = sigHandler;
-    if (sigaction(SIGWINCH, &sa, 0)) throw SysError("handling SIGWINCH");
+    act.sa_handler = sigHandler;
+    if (sigaction(SIGWINCH, &act, 0)) throw SysError("handling SIGWINCH");
+
+    /* Disable SA_RESTART for interrupts, so that system calls on this thread
+     * error with EINTR like they do on Linux.
+     * Most signals on BSD systems default to SA_RESTART on, but Nix
+     * expects EINTR from syscalls to properly exit. */
+    act.sa_handler = SIG_DFL;
+    if (sigaction(SIGINT, &act, 0)) throw SysError("handling SIGINT");
+    if (sigaction(SIGTERM, &act, 0)) throw SysError("handling SIGTERM");
+    if (sigaction(SIGHUP, &act, 0)) throw SysError("handling SIGHUP");
+    if (sigaction(SIGPIPE, &act, 0)) throw SysError("handling SIGPIPE");
+    if (sigaction(SIGQUIT, &act, 0)) throw SysError("handling SIGQUIT");
+    if (sigaction(SIGTRAP, &act, 0)) throw SysError("handling SIGTRAP");
 #endif

     /* Register a SIGSEGV handler to detect stack overflows. */
@@ -410,6 +423,8 @@ RunPager::RunPager()
     if (!pager) pager = getenv("PAGER");
     if (pager && ((std::string) pager == "" || (std::string) pager == "cat")) return;

+    stopProgressBar();
+
     Pipe toPager;
     toPager.create();
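The added comment in the hunk above says Nix wants interrupted system calls to fail with EINTR instead of being transparently restarted. As a standalone illustration of that general POSIX behaviour, not code from this commit, registering a handler without SA_RESTART makes a blocking read report the interruption:

```cpp
#include <cerrno>
#include <csignal>
#include <cstdio>
#include <unistd.h>

static void onSignal(int) { /* just interrupt the blocking call */ }

int main()
{
    struct sigaction act {};
    sigemptyset(&act.sa_mask);
    act.sa_handler = onSignal;
    act.sa_flags = 0;                 // no SA_RESTART: syscalls report EINTR
    sigaction(SIGINT, &act, nullptr);

    char buf[1];
    // Blocks until input arrives or a signal interrupts the call.
    ssize_t n = read(STDIN_FILENO, buf, sizeof buf);
    if (n == -1 && errno == EINTR)
        std::puts("read() was interrupted by a signal (EINTR)");
    return 0;
}
```

With SA_RESTART set instead, the same read would be silently restarted after the handler returns, which is the behaviour the hunk is disabling.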
@@ -344,7 +344,7 @@ void DerivationGoal::gaveUpOnSubstitution()
     for (auto & i : dynamic_cast<Derivation *>(drv.get())->inputDrvs) {
         /* Ensure that pure, non-fixed-output derivations don't
            depend on impure derivations. */
-        if (drv->type().isPure() && !drv->type().isFixed()) {
+        if (settings.isExperimentalFeatureEnabled(Xp::ImpureDerivations) && drv->type().isPure() && !drv->type().isFixed()) {
             auto inputDrv = worker.evalStore.readDerivation(i.first);
             if (!inputDrv.type().isPure())
                 throw Error("pure derivation '%s' depends on impure derivation '%s'",
@@ -619,6 +619,17 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
         Path path = storeDir + "/" + std::string(baseName);
         Path realPath = realStoreDir + "/" + std::string(baseName);
 
+        /* There may be temp directories in the store that are still in use
+           by another process. We need to be sure that we can acquire an
+           exclusive lock before deleting them. */
+        if (baseName.find("tmp-", 0) == 0) {
+            AutoCloseFD tmpDirFd = open(realPath.c_str(), O_RDONLY | O_DIRECTORY);
+            if (tmpDirFd.get() == -1 || !lockFile(tmpDirFd.get(), ltWrite, false)) {
+                debug("skipping locked tempdir '%s'", realPath);
+                return;
+            }
+        }
+
         printInfo("deleting '%1%'", path);
 
         results.paths.insert(path);
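The added check follows a common try-lock pattern: open a file descriptor on the directory and attempt a non-blocking exclusive lock; if that fails, another process is still using the tempdir and the garbage collector skips it. The sketch below shows the same shape with plain flock(2); Nix's lockFile() is its own helper and is not reproduced here, so treat this purely as an illustration:

```cpp
// Standalone sketch of the "try to lock before deleting" check.
#include <cstdio>
#include <fcntl.h>
#include <string>
#include <sys/file.h>
#include <unistd.h>

// Returns true if we obtained a non-blocking exclusive lock on the directory.
static bool tryLockDir(const std::string & dir, int & fdOut)
{
    int fd = open(dir.c_str(), O_RDONLY | O_DIRECTORY);
    if (fd == -1) return false;                   // vanished or inaccessible
    if (flock(fd, LOCK_EX | LOCK_NB) == -1) {     // somebody else holds the lock
        close(fd);
        return false;
    }
    fdOut = fd;
    return true;
}

int main(int argc, char * * argv)
{
    if (argc < 2) return 1;
    int fd = -1;
    if (!tryLockDir(argv[1], fd)) {
        fprintf(stderr, "skipping locked tempdir '%s'\n", argv[1]);
        return 0;
    }
    fprintf(stderr, "lock acquired; safe to delete '%s'\n", argv[1]);
    close(fd);                                    // closing the fd releases the lock
    return 0;
}
```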
@@ -1382,13 +1382,15 @@ StorePath LocalStore::addToStoreFromDump(Source & source0, std::string_view name
 
     std::unique_ptr<AutoDelete> delTempDir;
     Path tempPath;
+    Path tempDir;
+    AutoCloseFD tempDirFd;
 
     if (!inMemory) {
         /* Drain what we pulled so far, and then keep on pulling */
         StringSource dumpSource { dump };
         ChainSource bothSource { dumpSource, source };
 
-        auto tempDir = createTempDir(realStoreDir, "add");
+        std::tie(tempDir, tempDirFd) = createTempDirInStore();
         delTempDir = std::make_unique<AutoDelete>(tempDir);
         tempPath = tempDir + "/x";
 
@@ -1507,18 +1509,24 @@ StorePath LocalStore::addTextToStore(
 
 
 /* Create a temporary directory in the store that won't be
-   garbage-collected. */
-Path LocalStore::createTempDirInStore()
+   garbage-collected until the returned FD is closed. */
+std::pair<Path, AutoCloseFD> LocalStore::createTempDirInStore()
 {
-    Path tmpDir;
+    Path tmpDirFn;
+    AutoCloseFD tmpDirFd;
+    bool lockedByUs = false;
     do {
         /* There is a slight possibility that `tmpDir' gets deleted by
-           the GC between createTempDir() and addTempRoot(), so repeat
-           until `tmpDir' exists. */
-        tmpDir = createTempDir(realStoreDir);
-        addTempRoot(parseStorePath(tmpDir));
-    } while (!pathExists(tmpDir));
-    return tmpDir;
+           the GC between createTempDir() and when we acquire a lock on it.
+           We'll repeat until 'tmpDir' exists and we've locked it. */
+        tmpDirFn = createTempDir(realStoreDir, "tmp");
+        tmpDirFd = open(tmpDirFn.c_str(), O_RDONLY | O_DIRECTORY);
+        if (tmpDirFd.get() < 0) {
+            continue;
+        }
+        lockedByUs = lockFile(tmpDirFd.get(), ltWrite, true);
+    } while (!pathExists(tmpDirFn) || !lockedByUs);
+    return {tmpDirFn, std::move(tmpDirFd)};
 }
 
 
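The rewritten createTempDirInStore() replaces the old addTempRoot() trick with a lock held on the directory itself: create the directory, open it, try to lock it, and re-check that the path still exists, looping until all of that succeeds. A rough standalone sketch of that loop using mkdtemp(3) and flock(2) in place of Nix's createTempDir()/lockFile() (the helper name below is hypothetical, not code from this diff):

```cpp
// Standalone sketch of the create / open / lock / verify loop.
#include <cstdio>
#include <cstdlib>
#include <fcntl.h>
#include <string>
#include <sys/file.h>
#include <sys/stat.h>
#include <unistd.h>
#include <utility>

// Hypothetical helper: returns a locked temp directory and the fd holding the lock.
static std::pair<std::string, int> createLockedTempDir(const std::string & parent)
{
    while (true) {
        std::string dir = parent + "/tmp-XXXXXX";
        if (!mkdtemp(dir.data())) continue;          // creation failed, try again

        int fd = open(dir.c_str(), O_RDONLY | O_DIRECTORY);
        if (fd == -1) continue;                      // deleted underneath us

        if (flock(fd, LOCK_EX | LOCK_NB) == -1) {    // couldn't lock: retry
            close(fd);
            continue;
        }

        struct stat st;
        if (stat(dir.c_str(), &st) == -1) {          // a collector may have unlinked it
            close(fd);                               // before we held the lock: re-check
            continue;
        }
        return {dir, fd};                            // caller closes fd to release the lock
    }
}

int main()
{
    auto [dir, fd] = createLockedTempDir("/tmp");
    printf("locked temp dir: %s\n", dir.c_str());
    close(fd);
    return 0;
}
```

The re-check after locking is the important step: the lock is taken on the open file descriptor, so without re-checking the path the directory could already have been unlinked by a concurrent collector.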
@@ -256,7 +256,7 @@ private:
 
     void findRuntimeRoots(Roots & roots, bool censor);
 
-    Path createTempDirInStore();
+    std::pair<Path, AutoCloseFD> createTempDirInStore();
 
     void checkDerivationOutputs(const StorePath & drvPath, const Derivation & drv);
 
@@ -14,3 +14,7 @@
 
 ; Allow DNS lookups.
 (allow network-outbound (remote unix-socket (path-literal "/private/var/run/mDNSResponder")))
+
+; Allow access to trustd.
+(allow mach-lookup (global-name "com.apple.trustd"))
+(allow mach-lookup (global-name "com.apple.trustd.agent"))
@@ -216,7 +216,7 @@ nlohmann::json Args::toJSON()
         if (flag->shortName)
             j["shortName"] = std::string(1, flag->shortName);
         if (flag->description != "")
-            j["description"] = flag->description;
+            j["description"] = trim(flag->description);
         j["category"] = flag->category;
         if (flag->handler.arity != ArityAny)
             j["arity"] = flag->handler.arity;
@@ -237,7 +237,7 @@ nlohmann::json Args::toJSON()
     }
 
     auto res = nlohmann::json::object();
-    res["description"] = description();
+    res["description"] = trim(description());
     res["flags"] = std::move(flags);
    res["args"] = std::move(args);
     auto s = doc();
@@ -379,7 +379,7 @@ nlohmann::json MultiCommand::toJSON()
         auto j = command->toJSON();
         auto cat = nlohmann::json::object();
         cat["id"] = command->category();
-        cat["description"] = categories[command->category()];
+        cat["description"] = trim(categories[command->category()]);
         j["category"] = std::move(cat);
         cmds[name] = std::move(j);
     }
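The trim() calls added in these hunks matter because flag and command descriptions are typically written as indented raw string literals, so they carry leading and trailing whitespace that would otherwise leak into the generated JSON and, from there, into the rendered documentation and shell completions. A small sketch of the effect, using nlohmann::json and a stand-in for Nix's own trim() helper (the implementation below is illustrative only):

```cpp
// Standalone sketch: trimming a raw-string description before emitting JSON.
#include <iostream>
#include <string>
#include <nlohmann/json.hpp>

// Stand-in trim(): strips leading and trailing whitespace.
static std::string trim(const std::string & s)
{
    const char * ws = " \t\n\r";
    auto b = s.find_first_not_of(ws);
    if (b == std::string::npos) return "";
    auto e = s.find_last_not_of(ws);
    return s.substr(b, e - b + 1);
}

int main()
{
    std::string description = R"(
        Show a usage summary.
    )";

    nlohmann::json j;
    j["raw"] = description;           // keeps the surrounding newlines and indentation
    j["trimmed"] = trim(description); // "Show a usage summary."
    std::cout << j.dump(2) << std::endl;
}
```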
@ -66,6 +66,12 @@ R""(
|
||||||
`nixpkgs#glibc` in `~/my-glibc` and want to compile another package
|
`nixpkgs#glibc` in `~/my-glibc` and want to compile another package
|
||||||
against it.
|
against it.
|
||||||
|
|
||||||
|
* Run a series of script commands:
|
||||||
|
|
||||||
|
```console
|
||||||
|
# nix develop --command bash -c "mkdir build && cmake .. && make"
|
||||||
|
```
|
||||||
|
|
||||||
# Description
|
# Description
|
||||||
|
|
||||||
`nix develop` starts a `bash` shell that provides an interactive build
|
`nix develop` starts a `bash` shell that provides an interactive build
|
||||||
|
|
|
@@ -325,7 +325,7 @@ void mainWrapped(int argc, char * * argv)
                 std::cout << "attrs\n"; break;
             }
             for (auto & s : *completions)
-                std::cout << s.completion << "\t" << s.description << "\n";
+                std::cout << s.completion << "\t" << trim(s.description) << "\n";
         }
     });
 
@ -23,6 +23,12 @@ R""(
|
||||||
Hi everybody!
|
Hi everybody!
|
||||||
```
|
```
|
||||||
|
|
||||||
|
* Run multiple commands in a shell environment:
|
||||||
|
|
||||||
|
```console
|
||||||
|
# nix shell nixpkgs#gnumake -c sh -c "cd src && make"
|
||||||
|
```
|
||||||
|
|
||||||
* Run GNU Hello in a chroot store:
|
* Run GNU Hello in a chroot store:
|
||||||
|
|
||||||
```console
|
```console
|
||||||
|
|