Merge remote-tracking branch 'origin/master' into flakes
Commit b5565a7081
36 changed files with 470 additions and 180 deletions
.gitignore (vendored): 3 changes

@@ -81,6 +81,9 @@ perl/Makefile.config
/tests/common.sh
/tests/dummy
/tests/result*
/tests/restricted-innocent
/tests/shell
/tests/shell.drv

# /tests/lang/
/tests/lang/*.out

@@ -1,4 +1,6 @@
AR = @AR@
BDW_GC_LIBS = @BDW_GC_LIBS@
BUILD_SHARED_LIBS = @BUILD_SHARED_LIBS@
CC = @CC@
CFLAGS = @CFLAGS@
CXX = @CXX@

configure.ac: 12 changes

@@ -1,4 +1,4 @@
AC_INIT(nix, m4_esyscmd([bash -c "echo -n $(cat ./version)$VERSION_SUFFIX"]))
AC_INIT(nix, m4_esyscmd([bash -c "echo -n $(cat ./.version)$VERSION_SUFFIX"]))
AC_CONFIG_SRCDIR(README.md)
AC_CONFIG_AUX_DIR(config)

@@ -64,6 +64,7 @@ AC_PROG_CXX
AC_PROG_CPP
AX_CXX_COMPILE_STDCXX_17

AC_CHECK_TOOL([AR], [ar])

# Use 64-bit file system calls so that we can support files > 2 GiB.
AC_SYS_LARGEFILE

@@ -267,6 +268,15 @@ AC_ARG_WITH(sandbox-shell, AC_HELP_STRING([--with-sandbox-shell=PATH],
sandbox_shell=$withval)
AC_SUBST(sandbox_shell)

AC_ARG_ENABLE(shared, AC_HELP_STRING([--enable-shared],
[Build shared libraries for Nix [default=yes]]),
shared=$enableval, shared=yes)
if test "$shared" = yes; then
AC_SUBST(BUILD_SHARED_LIBS, 1, [Whether to build shared libraries.])
else
AC_SUBST(BUILD_SHARED_LIBS, 0, [Whether to build shared libraries.])
fi

# Expand all variables in config.status.
test "$prefix" = NONE && prefix=$ac_default_prefix

@@ -180,4 +180,8 @@ builders = @/etc/nix/machines
causes the list of machines in <filename>/etc/nix/machines</filename>
to be included. (This is the default.)</para>

<para>If you want the builders to use caches, you likely want to set
the option <link linkend='conf-builders-use-substitutes'><literal>builders-use-substitutes</literal></link>
in your local <filename>nix.conf</filename>.</para>

</chapter>

@@ -52,10 +52,15 @@ nixpkgs=/home/eelco/Dev/nixpkgs-branch:/etc/nixos</screen>
<envar>NIX_PATH</envar> to

<screen>
nixpkgs=https://github.com/NixOS/nixpkgs-channels/archive/nixos-14.12.tar.gz</screen>
nixpkgs=https://github.com/NixOS/nixpkgs-channels/archive/nixos-15.09.tar.gz</screen>

tells Nix to download the latest revision in the Nixpkgs/NixOS
14.12 channel.</para>
15.09 channel.</para>

<para>The following shorthand can be used to refer to the official channels:

<screen>nixpkgs=channel:nixos-15.09</screen>
</para>

<para>The search path can be extended using the <option
linkend="opt-I">-I</option> option, which takes precedence over

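The hunk above documents the channel: shorthand for NIX_PATH entries. As a minimal, hedged sketch (assuming NIX_PATH contains an entry such as nixpkgs=channel:nixos-15.09), this is how such an entry surfaces in the Nix language:

# <nixpkgs> is looked up in NIX_PATH; with nixpkgs=channel:nixos-15.09 it
# evaluates to the path of the downloaded and unpacked channel tarball.
builtins.toString <nixpkgs>
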
@@ -1282,6 +1282,7 @@ ktorrent-2.2.1/NEWS
<cmdsynopsis>
<command>nix-store</command>
<arg choice='plain'><option>--dump-db</option></arg>
<arg rep='repeat'><replaceable>paths</replaceable></arg>
</cmdsynopsis>
</refsection>

@@ -1292,6 +1293,13 @@ Nix database to standard output. It can be loaded into an empty Nix
store using <option>--load-db</option>. This is useful for making
backups and when migrating to different database schemas.</para>

<para>By default, <option>--dump-db</option> will dump the entire Nix
database. When one or more store paths is passed, only the subset of
the Nix database for those store paths is dumped. As with
<option>--export</option>, the user is responsible for passing all the
store paths for a closure. See <option>--export</option> for an
example.</para>

</refsection>

</refsection>

@@ -23,6 +23,7 @@ available as <function>builtins.derivation</function>.</para>

<varlistentry xml:id='builtin-abort'>
<term><function>abort</function> <replaceable>s</replaceable></term>
<term><function>builtins.abort</function> <replaceable>s</replaceable></term>

<listitem><para>Abort Nix expression evaluation, print error
message <replaceable>s</replaceable>.</para></listitem>

@@ -251,6 +252,8 @@ if builtins ? getEnv then builtins.getEnv "PATH" else ""</programlisting>
<varlistentry xml:id='builtin-derivation'>
<term><function>derivation</function>
<replaceable>attrs</replaceable></term>
<term><function>builtins.derivation</function>
<replaceable>attrs</replaceable></term>

<listitem><para><function>derivation</function> is described in
<xref linkend='ssec-derivation' />.</para></listitem>

@@ -260,6 +263,7 @@ if builtins ? getEnv then builtins.getEnv "PATH" else ""</programlisting>

<varlistentry xml:id='builtin-dirOf'>
<term><function>dirOf</function> <replaceable>s</replaceable></term>
<term><function>builtins.dirOf</function> <replaceable>s</replaceable></term>

<listitem><para>Return the directory part of the string
<replaceable>s</replaceable>, that is, everything before the final

@@ -318,6 +322,8 @@ if builtins ? getEnv then builtins.getEnv "PATH" else ""</programlisting>
<varlistentry xml:id='builtin-fetchTarball'>
<term><function>fetchTarball</function>
<replaceable>url</replaceable></term>
<term><function>builtins.fetchTarball</function>
<replaceable>url</replaceable></term>

<listitem><para>Download the specified URL, unpack it and return
the path of the unpacked tree. The file must be a tape archive

@@ -693,8 +699,8 @@ builtins.genList (x: x * x) 5
<listitem><para>Return a base-16 representation of the
cryptographic hash of string <replaceable>s</replaceable>. The
hash algorithm specified by <replaceable>type</replaceable> must
be one of <literal>"md5"</literal>, <literal>"sha1"</literal> or
<literal>"sha256"</literal>.</para></listitem>
be one of <literal>"md5"</literal>, <literal>"sha1"</literal>,
<literal>"sha256"</literal> or <literal>"sha512"</literal>.</para></listitem>

</varlistentry>

@@ -714,6 +720,8 @@ builtins.genList (x: x * x) 5
<varlistentry xml:id='builtin-import'>
<term><function>import</function>
<replaceable>path</replaceable></term>
<term><function>builtins.import</function>
<replaceable>path</replaceable></term>

<listitem><para>Load, parse and return the Nix expression in the
file <replaceable>path</replaceable>. If <replaceable>path

@@ -853,10 +861,20 @@ x: x + 456</programlisting>

</varlistentry>

<varlistentry><term><function>builtins.isPath</function>
<replaceable>e</replaceable></term>

<listitem><para>Return <literal>true</literal> if
<replaceable>e</replaceable> evaluates to a path, and
<literal>false</literal> otherwise.</para></listitem>

</varlistentry>

<varlistentry xml:id='builtin-isNull'>
<term><function>isNull</function>
<replaceable>e</replaceable></term>
<term><function>builtins.isNull</function>
<replaceable>e</replaceable></term>

<listitem><para>Return <literal>true</literal> if
<replaceable>e</replaceable> evaluates to <literal>null</literal>,

@@ -925,6 +943,8 @@ builtins.listToAttrs
<varlistentry xml:id='builtin-map'>
<term><function>map</function>
<replaceable>f</replaceable> <replaceable>list</replaceable></term>
<term><function>builtins.map</function>
<replaceable>f</replaceable> <replaceable>list</replaceable></term>

<listitem><para>Apply the function <replaceable>f</replaceable> to
each element in the list <replaceable>list</replaceable>. For

@@ -1119,6 +1139,8 @@ Evaluates to <literal>[ "foo" ]</literal>.
<varlistentry xml:id='builtin-removeAttrs'>
<term><function>removeAttrs</function>
<replaceable>set</replaceable> <replaceable>list</replaceable></term>
<term><function>builtins.removeAttrs</function>
<replaceable>set</replaceable> <replaceable>list</replaceable></term>

<listitem><para>Remove the attributes listed in
<replaceable>list</replaceable> from

@@ -1287,6 +1309,8 @@ builtins.substring 0 3 "nixos"
<varlistentry xml:id='builtin-throw'>
<term><function>throw</function>
<replaceable>s</replaceable></term>
<term><function>builtins.throw</function>
<replaceable>s</replaceable></term>

<listitem><para>Throw an error message
<replaceable>s</replaceable>. This usually aborts Nix expression

@@ -1405,6 +1429,7 @@ in foo</programlisting>

<varlistentry xml:id='builtin-toString'>
<term><function>toString</function> <replaceable>e</replaceable></term>
<term><function>builtins.toString</function> <replaceable>e</replaceable></term>

<listitem><para>Convert the expression
<replaceable>e</replaceable> to a string.

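Two of the documentation hunks above correspond to functional changes: hashString gains "sha512" and a new isPath predicate is documented. A small, hedged Nix sketch of both, assuming a Nix build that includes these changes:

# Illustrative only; evaluates to [ true false 128 ].
[
  (builtins.isPath ./.)                      # a path value
  (builtins.isPath "./.")                    # a string, not a path
  (builtins.stringLength
    (builtins.hashString "sha512" "nix"))    # sha512 digests render as 128 hex chars
]
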
@@ -89,7 +89,7 @@ the S3 URL:</para>
"Version": "2012-10-17",
"Statement": [
{
"Sid": "AlowDirectReads",
"Sid": "AllowDirectReads",
"Action": [
"s3:GetObject",
"s3:GetBucketLocation"

@@ -2,6 +2,11 @@
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>EnvironmentVariables</key>
<dict>
<key>OBJC_DISABLE_INITIALIZE_FORK_SAFETY</key>
<string>YES</string>
</dict>
<key>Label</key>
<string>org.nixos.nix-daemon</string>
<key>KeepAlive</key>

@@ -125,7 +125,7 @@ define build-library
$(1)_PATH := $$(_d)/$$($(1)_NAME).a

$$($(1)_PATH): $$($(1)_OBJS) | $$(_d)/
$(trace-ar) ar crs $$@ $$?
$(trace-ar) $(AR) crs $$@ $$?

$(1)_LDFLAGS_USE += $$($(1)_PATH) $$($(1)_LDFLAGS)

@@ -1,4 +1,4 @@
AC_INIT(nix-perl, m4_esyscmd([bash -c "echo -n $(cat ../version)$VERSION_SUFFIX"]))
AC_INIT(nix-perl, m4_esyscmd([bash -c "echo -n $(cat ../.version)$VERSION_SUFFIX"]))
AC_CONFIG_SRCDIR(MANIFEST)
AC_CONFIG_AUX_DIR(../config)

@@ -18,7 +18,7 @@ let

releaseTools.sourceTarball {
name = "nix-tarball";
version = builtins.readFile ./version;
version = builtins.readFile ./.version;
versionSuffix = if officialRelease then "" else "pre${toString nix.revCount}_${nix.shortRev}";
src = nix;
inherit officialRelease;

@@ -278,7 +278,6 @@ let
pkgs.runCommand "eval-nixos" { buildInputs = [ build.x86_64-linux ]; }
''
export NIX_STATE_DIR=$TMPDIR
nix-store --init

nix-instantiate ${nixpkgs}/nixos/release-combined.nix -A tested --dry-run \
--arg nixpkgs '{ outPath = ${nixpkgs}; revCount = 123; shortRev = "abcdefgh"; }'

@@ -674,9 +674,6 @@ $NIX_INSTALLED_NIX.
EOF
fi

_sudo "to initialize the Nix Database" \
$NIX_INSTALLED_NIX/bin/nix-store --init

cat ./.reginfo \
| _sudo "to load data for the first time in to the Nix Database" \
"$NIX_INSTALLED_NIX/bin/nix-store" --load-db

@@ -747,7 +744,6 @@ build-users-group = $NIX_BUILD_GROUP_NAME

max-jobs = $NIX_USER_COUNT
cores = 1
sandbox = false
EOF
_sudo "to place the default nix daemon configuration (part 2)" \
install -m 0664 "$SCRATCH/nix.conf" /etc/nix/nix.conf

@@ -109,12 +109,6 @@ for i in $(cd "$self/store" >/dev/null && echo ./*); do
done
echo "" >&2

echo "initialising Nix database..." >&2
if ! $nix/bin/nix-store --init; then
echo "$0: failed to initialize the Nix database" >&2
exit 1
fi

if ! "$nix/bin/nix-store" --load-db < "$self/.reginfo"; then
echo "$0: unable to register valid paths" >&2
exit 1

@@ -75,7 +75,7 @@ if [ -n "$HOME" ] && [ -n "$USER" ]; then
export NIX_SSL_CERT_FILE="$NIX_LINK/etc/ca-bundle.crt"
fi

if [ -n "${MANPATH}" ]; then
if [ -n "${MANPATH-}" ]; then
export MANPATH="$NIX_LINK/share/man:$MANPATH"
fi

@@ -38,6 +38,12 @@ static AutoCloseFD openSlotLock(const Machine & m, unsigned long long slot)
return openLockFile(fmt("%s/%s-%d", currentLoad, escapeUri(m.storeUri), slot), true);
}

static bool allSupportedLocally(const std::set<std::string>& requiredFeatures) {
for (auto & feature : requiredFeatures)
if (!settings.systemFeatures.get().count(feature)) return false;
return true;
}

static int _main(int argc, char * * argv)
{
{

@@ -99,7 +105,8 @@ static int _main(int argc, char * * argv)

auto canBuildLocally = amWilling
&& ( neededSystem == settings.thisSystem
|| settings.extraPlatforms.get().count(neededSystem) > 0);
|| settings.extraPlatforms.get().count(neededSystem) > 0)
&& allSupportedLocally(requiredFeatures);

/* Error ignored here, will be caught later */
mkdir(currentLoad.c_str(), 0777);

@@ -131,6 +131,16 @@ std::ostream & operator << (std::ostream & str, const Value & v)
}

const Value *getPrimOp(const Value &v) {
const Value * primOp = &v;
while (primOp->type == tPrimOpApp) {
primOp = primOp->primOpApp.left;
}
assert(primOp->type == tPrimOp);
return primOp;
}

string showType(const Value & v)
{
switch (v.type) {

@@ -145,8 +155,10 @@ string showType(const Value & v)
case tApp: return "a function application";
case tLambda: return "a function";
case tBlackhole: return "a black hole";
case tPrimOp: return "a built-in function";
case tPrimOpApp: return "a partially applied built-in function";
case tPrimOp:
return fmt("the built-in function '%s'", string(v.primOp->name));
case tPrimOpApp:
return fmt("the partially applied built-in function '%s'", string(getPrimOp(v)->primOp->name));
case tExternal: return v.external->showType();
case tFloat: return "a float";
}

@@ -327,6 +327,9 @@ private:
/* Return a string representing the type of the value `v'. */
string showType(const Value & v);

/* Decode a context string ‘!<name>!<path>’ into a pair <path,
name>. */
std::pair<string, string> decodeContext(const string & s);

/* If `path' refers to a directory, then append "/default.nix". */
Path resolveExprPath(Path path);

@@ -315,6 +315,12 @@ static void prim_isBool(EvalState & state, const Pos & pos, Value * * args, Valu
mkBool(v, args[0]->type == tBool);
}

/* Determine whether the argument is a path. */
static void prim_isPath(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
state.forceValue(*args[0]);
mkBool(v, args[0]->type == tPath);
}

struct CompareValues
{

@@ -687,21 +693,12 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
}
}

/* See prim_unsafeDiscardOutputDependency. */
else if (path.at(0) == '~')
drv.inputSrcs.insert(string(path, 1));

/* Handle derivation outputs of the form ‘!<name>!<path>’. */
else if (path.at(0) == '!') {
std::pair<string, string> ctx = decodeContext(path);
drv.inputDrvs[ctx.first].insert(ctx.second);
}

/* Handle derivation contexts returned by
‘builtins.storePath’. */
else if (isDerivation(path))
drv.inputDrvs[path] = state.store->queryDerivationOutputNames(path);

/* Otherwise it's a source file. */
else
drv.inputSrcs.insert(path);

@@ -1004,13 +1001,8 @@ static void prim_toFile(EvalState & state, const Pos & pos, Value * * args, Valu
PathSet refs;

for (auto path : context) {
if (path.at(0) == '=') path = string(path, 1);
if (isDerivation(path)) {
/* See prim_unsafeDiscardOutputDependency. */
if (path.at(0) != '~')
if (path.at(0) != '/')
throw EvalError(format("in 'toFile': the file '%1%' cannot refer to derivation outputs, at %2%") % name % pos);
path = string(path, 1);
}
refs.insert(path);
}

@@ -1794,41 +1786,6 @@ static void prim_stringLength(EvalState & state, const Pos & pos, Value * * args
}

static void prim_unsafeDiscardStringContext(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
PathSet context;
string s = state.coerceToString(pos, *args[0], context);
mkString(v, s, PathSet());
}

static void prim_hasContext(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
PathSet context;
state.forceString(*args[0], context, pos);
mkBool(v, !context.empty());
}

/* Sometimes we want to pass a derivation path (i.e. pkg.drvPath) to a
builder without causing the derivation to be built (for instance,
in the derivation that builds NARs in nix-push, when doing
source-only deployment). This primop marks the string context so
that builtins.derivation adds the path to drv.inputSrcs rather than
drv.inputDrvs. */
static void prim_unsafeDiscardOutputDependency(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
PathSet context;
string s = state.coerceToString(pos, *args[0], context);

PathSet context2;
for (auto & p : context)
context2.insert(p.at(0) == '=' ? "~" + string(p, 1) : p);

mkString(v, s, context2);
}

/* Return the cryptographic hash of a string in base-16. */
static void prim_hashString(EvalState & state, const Pos & pos, Value * * args, Value & v)
{

@@ -2218,6 +2175,7 @@ void EvalState::createBaseEnv()
addPrimOp("__isInt", 1, prim_isInt);
addPrimOp("__isFloat", 1, prim_isFloat);
addPrimOp("__isBool", 1, prim_isBool);
addPrimOp("__isPath", 1, prim_isPath);
addPrimOp("__genericClosure", 1, prim_genericClosure);
addPrimOp("abort", 1, prim_abort);
addPrimOp("__addErrorContext", 2, prim_addErrorContext);

@@ -2299,9 +2257,6 @@ void EvalState::createBaseEnv()
addPrimOp("toString", 1, prim_toString);
addPrimOp("__substring", 3, prim_substring);
addPrimOp("__stringLength", 1, prim_stringLength);
addPrimOp("__hasContext", 1, prim_hasContext);
addPrimOp("__unsafeDiscardStringContext", 1, prim_unsafeDiscardStringContext);
addPrimOp("__unsafeDiscardOutputDependency", 1, prim_unsafeDiscardOutputDependency);
addPrimOp("__hashString", 2, prim_hashString);
addPrimOp("__match", 2, prim_match);
addPrimOp("__split", 2, prim_split);

src/libexpr/primops/context.cc: new file, 187 additions

@@ -0,0 +1,187 @@
#include "primops.hh"
#include "eval-inline.hh"
#include "derivations.hh"

namespace nix {

static void prim_unsafeDiscardStringContext(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
PathSet context;
string s = state.coerceToString(pos, *args[0], context);
mkString(v, s, PathSet());
}

static RegisterPrimOp r1("__unsafeDiscardStringContext", 1, prim_unsafeDiscardStringContext);

static void prim_hasContext(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
PathSet context;
state.forceString(*args[0], context, pos);
mkBool(v, !context.empty());
}

static RegisterPrimOp r2("__hasContext", 1, prim_hasContext);

/* Sometimes we want to pass a derivation path (i.e. pkg.drvPath) to a
builder without causing the derivation to be built (for instance,
in the derivation that builds NARs in nix-push, when doing
source-only deployment). This primop marks the string context so
that builtins.derivation adds the path to drv.inputSrcs rather than
drv.inputDrvs. */
static void prim_unsafeDiscardOutputDependency(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
PathSet context;
string s = state.coerceToString(pos, *args[0], context);

PathSet context2;
for (auto & p : context)
context2.insert(p.at(0) == '=' ? string(p, 1) : p);

mkString(v, s, context2);
}

static RegisterPrimOp r3("__unsafeDiscardOutputDependency", 1, prim_unsafeDiscardOutputDependency);

/* Extract the context of a string as a structured Nix value.

The context is represented as an attribute set whose keys are the
paths in the context set and whose values are attribute sets with
the following keys:
path: True if the relevant path is in the context as a plain store
path (i.e. the kind of context you get when interpolating
a Nix path (e.g. ./.) into a string). False if missing.
allOutputs: True if the relevant path is a derivation and it is
in the context as a drv file with all of its outputs
(i.e. the kind of context you get when referencing
.drvPath of some derivation). False if missing.
outputs: If a non-empty list, the relevant path is a derivation
and the provided outputs are referenced in the context
(i.e. the kind of context you get when referencing
.outPath of some derivation). Empty list if missing.
Note that for a given path any combination of the above attributes
may be present.
*/
static void prim_getContext(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
struct ContextInfo {
bool path = false;
bool allOutputs = false;
Strings outputs;
};
PathSet context;
state.forceString(*args[0], context, pos);
auto contextInfos = std::map<Path, ContextInfo>();
for (const auto & p : context) {
Path drv;
string output;
const Path * path = &p;
if (p.at(0) == '=') {
drv = string(p, 1);
path = &drv;
} else if (p.at(0) == '!') {
std::pair<string, string> ctx = decodeContext(p);
drv = ctx.first;
output = ctx.second;
path = &drv;
}
auto isPath = drv.empty();
auto isAllOutputs = (!drv.empty()) && output.empty();

auto iter = contextInfos.find(*path);
if (iter == contextInfos.end()) {
contextInfos.emplace(*path, ContextInfo{isPath, isAllOutputs, output.empty() ? Strings{} : Strings{std::move(output)}});
} else {
if (isPath)
iter->second.path = true;
else if (isAllOutputs)
iter->second.allOutputs = true;
else
iter->second.outputs.emplace_back(std::move(output));
}
}

state.mkAttrs(v, contextInfos.size());

auto sPath = state.symbols.create("path");
auto sAllOutputs = state.symbols.create("allOutputs");
for (const auto & info : contextInfos) {
auto & infoVal = *state.allocAttr(v, state.symbols.create(info.first));
state.mkAttrs(infoVal, 3);
if (info.second.path)
mkBool(*state.allocAttr(infoVal, sPath), true);
if (info.second.allOutputs)
mkBool(*state.allocAttr(infoVal, sAllOutputs), true);
if (!info.second.outputs.empty()) {
auto & outputsVal = *state.allocAttr(infoVal, state.sOutputs);
state.mkList(outputsVal, info.second.outputs.size());
size_t i = 0;
for (const auto & output : info.second.outputs) {
mkString(*(outputsVal.listElems()[i++] = state.allocValue()), output);
}
}
infoVal.attrs->sort();
}
v.attrs->sort();
}

static RegisterPrimOp r4("__getContext", 1, prim_getContext);

/* Append the given context to a given string.

See the commentary above unsafeGetContext for details of the
context representation.
*/
static void prim_appendContext(EvalState & state, const Pos & pos, Value * * args, Value & v)
{
PathSet context;
auto orig = state.forceString(*args[0], context, pos);

state.forceAttrs(*args[1], pos);

auto sPath = state.symbols.create("path");
auto sAllOutputs = state.symbols.create("allOutputs");
for (auto & i : *args[1]->attrs) {
if (!state.store->isStorePath(i.name))
throw EvalError("Context key '%s' is not a store path, at %s", i.name, i.pos);
if (!settings.readOnlyMode)
state.store->ensurePath(i.name);
state.forceAttrs(*i.value, *i.pos);
auto iter = i.value->attrs->find(sPath);
if (iter != i.value->attrs->end()) {
if (state.forceBool(*iter->value, *iter->pos))
context.insert(i.name);
}

iter = i.value->attrs->find(sAllOutputs);
if (iter != i.value->attrs->end()) {
if (state.forceBool(*iter->value, *iter->pos)) {
if (!isDerivation(i.name)) {
throw EvalError("Tried to add all-outputs context of %s, which is not a derivation, to a string, at %s", i.name, i.pos);
}
context.insert("=" + string(i.name));
}
}

iter = i.value->attrs->find(state.sOutputs);
if (iter != i.value->attrs->end()) {
state.forceList(*iter->value, *iter->pos);
if (iter->value->listSize() && !isDerivation(i.name)) {
throw EvalError("Tried to add derivation output context of %s, which is not a derivation, to a string, at %s", i.name, i.pos);
}
for (unsigned int n = 0; n < iter->value->listSize(); ++n) {
auto name = state.forceStringNoCtx(*iter->value->listElems()[n], *iter->pos);
context.insert("!" + name + "!" + string(i.name));
}
}
}

mkString(v, orig, context);
}

static RegisterPrimOp r5("__appendContext", 2, prim_appendContext);

}

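The comment block above describes the structured context representation produced by the new __getContext primop (builtins.getContext) and consumed by builtins.appendContext. A minimal Nix sketch of the round trip, in the spirit of the eval-okay-context-introspection test added later in this commit; the "example" derivation and its attributes are illustrative only, and this assumes a read-only evaluation (as in the language tests), where appendContext skips ensurePath:

let
  drv = derivation {
    name = "example";
    system = builtins.currentSystem;
    builder = "/bin/sh";
    args = [ "-c" ": > $out" ];
  };
  # Interpolating drv.drvPath puts the .drv file in the string context;
  # getContext reports it with allOutputs = true, mirroring ContextInfo.
  ctx = builtins.getContext "${drv.drvPath}";
  # ctx has the shape { "/nix/store/…-example.drv" = { allOutputs = true; }; }
in builtins.getContext (builtins.appendContext "" ctx) == ctx
# evaluates to true
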
@@ -614,6 +614,22 @@ struct CurlDownloader : public Downloader
writeFull(wakeupPipe.writeSide.get(), " ");
}

#ifdef ENABLE_S3
std::tuple<std::string, std::string, Store::Params> parseS3Uri(std::string uri)
{
auto [path, params] = splitUriAndParams(uri);

auto slash = path.find('/', 5); // 5 is the length of "s3://" prefix
if (slash == std::string::npos)
throw nix::Error("bad S3 URI '%s'", path);

std::string bucketName(path, 5, slash - 5);
std::string key(path, slash + 1);

return {bucketName, key, params};
}
#endif

void enqueueDownload(const DownloadRequest & request,
Callback<DownloadResult> callback) override
{

@@ -622,12 +638,15 @@ struct CurlDownloader : public Downloader
// FIXME: do this on a worker thread
try {
#ifdef ENABLE_S3
S3Helper s3Helper("", Aws::Region::US_EAST_1, "", ""); // FIXME: make configurable
auto slash = request.uri.find('/', 5);
if (slash == std::string::npos)
throw nix::Error("bad S3 URI '%s'", request.uri);
std::string bucketName(request.uri, 5, slash - 5);
std::string key(request.uri, slash + 1);
auto [bucketName, key, params] = parseS3Uri(request.uri);

std::string profile = get(params, "profile", "");
std::string region = get(params, "region", Aws::Region::US_EAST_1);
std::string scheme = get(params, "scheme", "");
std::string endpoint = get(params, "endpoint", "");

S3Helper s3Helper(profile, region, scheme, endpoint);

// FIXME: implement ETag
auto s3Res = s3Helper.getObject(bucketName, key);
DownloadResult res;

@@ -129,8 +129,8 @@ Path LocalFSStore::addPermRoot(const Path & _storePath,
check if the root is in a directory in or linked from the
gcroots directory. */
if (settings.checkRootReachability) {
Roots roots = findRoots();
if (roots.find(gcRoot) == roots.end())
Roots roots = findRoots(false);
if (roots[storePath].count(gcRoot) == 0)
printError(
format(
"warning: '%1%' is not in a directory where the garbage collector looks for roots; "

@@ -197,10 +197,11 @@ void LocalStore::addTempRoot(const Path & path)
}

std::set<std::pair<pid_t, Path>> LocalStore::readTempRoots(FDs & fds)
{
std::set<std::pair<pid_t, Path>> tempRoots;
static std::string censored = "{censored}";

void LocalStore::findTempRoots(FDs & fds, Roots & tempRoots, bool censor)
{
/* Read the `temproots' directory for per-process temporary root
files. */
for (auto & i : readDirectory(tempRootsDir)) {

@@ -250,14 +251,12 @@ std::set<std::pair<pid_t, Path>> LocalStore::readTempRoots(FDs & fds)
Path root(contents, pos, end - pos);
debug("got temporary root '%s'", root);
assertStorePath(root);
tempRoots.emplace(pid, root);
tempRoots[root].emplace(censor ? censored : fmt("{temp:%d}", pid));
pos = end + 1;
}

fds.push_back(fd); /* keep open */
}

return tempRoots;
}

@@ -266,7 +265,7 @@ void LocalStore::findRoots(const Path & path, unsigned char type, Roots & roots)
auto foundRoot = [&](const Path & path, const Path & target) {
Path storePath = toStorePath(target);
if (isStorePath(storePath) && isValidPath(storePath))
roots[path] = storePath;
roots[storePath].emplace(path);
else
printInfo(format("skipping invalid root from '%1%' to '%2%'") % path % storePath);
};

@@ -306,7 +305,7 @@ void LocalStore::findRoots(const Path & path, unsigned char type, Roots & roots)
else if (type == DT_REG) {
Path storePath = storeDir + "/" + baseNameOf(path);
if (isStorePath(storePath) && isValidPath(storePath))
roots[path] = storePath;
roots[storePath].emplace(path);
}

}

@@ -321,10 +320,8 @@ void LocalStore::findRoots(const Path & path, unsigned char type, Roots & roots)
}

Roots LocalStore::findRootsNoTemp()
void LocalStore::findRootsNoTemp(Roots & roots, bool censor)
{
Roots roots;

/* Process direct roots in {gcroots,profiles}. */
findRoots(stateDir + "/" + gcRootsDir, DT_UNKNOWN, roots);
findRoots(stateDir + "/profiles", DT_UNKNOWN, roots);

@@ -333,32 +330,22 @@ Roots LocalStore::findRootsNoTemp()
NIX_ROOT_FINDER environment variable. This is typically used
to add running programs to the set of roots (to prevent them
from being garbage collected). */
size_t n = 0;
for (auto & root : findRuntimeRoots())
roots[fmt("{memory:%d}", n++)] = root;

return roots;
findRuntimeRoots(roots, censor);
}

Roots LocalStore::findRoots()
Roots LocalStore::findRoots(bool censor)
{
Roots roots = findRootsNoTemp();
Roots roots;
findRootsNoTemp(roots, censor);

FDs fds;
pid_t prev = -1;
size_t n = 0;
for (auto & root : readTempRoots(fds)) {
if (prev != root.first) n = 0;
prev = root.first;
roots[fmt("{temp:%d:%d}", root.first, n++)] = root.second;
}
findTempRoots(fds, roots, censor);

return roots;
}

static void readProcLink(const string & file, StringSet & paths)
static void readProcLink(const string & file, Roots & roots)
{
/* 64 is the starting buffer size gnu readlink uses... */
auto bufsiz = ssize_t{64};

@@ -377,8 +364,8 @@ try_again:
goto try_again;
}
if (res > 0 && buf[0] == '/')
paths.emplace(static_cast<char *>(buf), res);
return;
roots[std::string(static_cast<char *>(buf), res)]
.emplace(file);
}

static string quoteRegexChars(const string & raw)

@@ -387,20 +374,20 @@ static string quoteRegexChars(const string & raw)
return std::regex_replace(raw, specialRegex, R"(\$&)");
}

static void readFileRoots(const char * path, StringSet & paths)
static void readFileRoots(const char * path, Roots & roots)
{
try {
paths.emplace(readFile(path));
roots[readFile(path)].emplace(path);
} catch (SysError & e) {
if (e.errNo != ENOENT && e.errNo != EACCES)
throw;
}
}

PathSet LocalStore::findRuntimeRoots()
void LocalStore::findRuntimeRoots(Roots & roots, bool censor)
{
PathSet roots;
StringSet paths;
Roots unchecked;

auto procDir = AutoCloseDir{opendir("/proc")};
if (procDir) {
struct dirent * ent;

@@ -410,10 +397,10 @@ PathSet LocalStore::findRuntimeRoots()
while (errno = 0, ent = readdir(procDir.get())) {
checkInterrupt();
if (std::regex_match(ent->d_name, digitsRegex)) {
readProcLink((format("/proc/%1%/exe") % ent->d_name).str(), paths);
readProcLink((format("/proc/%1%/cwd") % ent->d_name).str(), paths);
readProcLink(fmt("/proc/%s/exe" ,ent->d_name), unchecked);
readProcLink(fmt("/proc/%s/cwd", ent->d_name), unchecked);

auto fdStr = (format("/proc/%1%/fd") % ent->d_name).str();
auto fdStr = fmt("/proc/%s/fd", ent->d_name);
auto fdDir = AutoCloseDir(opendir(fdStr.c_str()));
if (!fdDir) {
if (errno == ENOENT || errno == EACCES)

@@ -422,9 +409,8 @@ PathSet LocalStore::findRuntimeRoots()
}
struct dirent * fd_ent;
while (errno = 0, fd_ent = readdir(fdDir.get())) {
if (fd_ent->d_name[0] != '.') {
readProcLink((format("%1%/%2%") % fdStr % fd_ent->d_name).str(), paths);
}
if (fd_ent->d_name[0] != '.')
readProcLink(fmt("%s/%s", fdStr, fd_ent->d_name), unchecked);
}
if (errno) {
if (errno == ESRCH)

@@ -434,18 +420,19 @@ PathSet LocalStore::findRuntimeRoots()
fdDir.reset();

try {
auto mapLines =
tokenizeString<std::vector<string>>(readFile((format("/proc/%1%/maps") % ent->d_name).str(), true), "\n");
for (const auto& line : mapLines) {
auto mapFile = fmt("/proc/%s/maps", ent->d_name);
auto mapLines = tokenizeString<std::vector<string>>(readFile(mapFile, true), "\n");
for (const auto & line : mapLines) {
auto match = std::smatch{};
if (std::regex_match(line, match, mapRegex))
paths.emplace(match[1]);
unchecked[match[1]].emplace(mapFile);
}

auto envString = readFile((format("/proc/%1%/environ") % ent->d_name).str(), true);
auto envFile = fmt("/proc/%s/environ", ent->d_name);
auto envString = readFile(envFile, true);
auto env_end = std::sregex_iterator{};
for (auto i = std::sregex_iterator{envString.begin(), envString.end(), storePathRegex}; i != env_end; ++i)
paths.emplace(i->str());
unchecked[i->str()].emplace(envFile);
} catch (SysError & e) {
if (errno == ENOENT || errno == EACCES || errno == ESRCH)
continue;

@@ -465,7 +452,7 @@ PathSet LocalStore::findRuntimeRoots()
for (const auto & line : lsofLines) {
std::smatch match;
if (std::regex_match(line, match, lsofRegex))
paths.emplace(match[1]);
unchecked[match[1]].emplace("{lsof}");
}
} catch (ExecError & e) {
/* lsof not installed, lsof failed */

@@ -473,21 +460,23 @@ PathSet LocalStore::findRuntimeRoots()
#endif

#if defined(__linux__)
readFileRoots("/proc/sys/kernel/modprobe", paths);
readFileRoots("/proc/sys/kernel/fbsplash", paths);
readFileRoots("/proc/sys/kernel/poweroff_cmd", paths);
readFileRoots("/proc/sys/kernel/modprobe", unchecked);
readFileRoots("/proc/sys/kernel/fbsplash", unchecked);
readFileRoots("/proc/sys/kernel/poweroff_cmd", unchecked);
#endif

for (auto & i : paths)
if (isInStore(i)) {
Path path = toStorePath(i);
if (roots.find(path) == roots.end() && isStorePath(path) && isValidPath(path)) {
for (auto & [target, links] : unchecked) {
if (isInStore(target)) {
Path path = toStorePath(target);
if (isStorePath(path) && isValidPath(path)) {
debug(format("got additional root '%1%'") % path);
roots.insert(path);
if (censor)
roots[path].insert(censored);
else
roots[path].insert(links.begin(), links.end());
}
}
}

return roots;
}

@@ -754,16 +743,20 @@ void LocalStore::collectGarbage(const GCOptions & options, GCResults & results)
/* Find the roots. Since we've grabbed the GC lock, the set of
permanent roots cannot increase now. */
printError(format("finding garbage collector roots..."));
Roots rootMap = options.ignoreLiveness ? Roots() : findRootsNoTemp();
Roots rootMap;
if (!options.ignoreLiveness)
findRootsNoTemp(rootMap, true);

for (auto & i : rootMap) state.roots.insert(i.second);
for (auto & i : rootMap) state.roots.insert(i.first);

/* Read the temporary roots. This acquires read locks on all
per-process temporary root files. So after this point no paths
can be added to the set of temporary roots. */
FDs fds;
for (auto & root : readTempRoots(fds))
state.tempRoots.insert(root.second);
Roots tempRoots;
findTempRoots(fds, tempRoots, true);
for (auto & root : tempRoots)
state.tempRoots.insert(root.first);
state.roots.insert(state.tempRoots.begin(), state.tempRoots.end());

/* After this point the set of roots or temporary roots cannot

@@ -180,11 +180,11 @@ private:
typedef std::shared_ptr<AutoCloseFD> FDPtr;
typedef list<FDPtr> FDs;

std::set<std::pair<pid_t, Path>> readTempRoots(FDs & fds);
void findTempRoots(FDs & fds, Roots & roots, bool censor);

public:

Roots findRoots() override;
Roots findRoots(bool censor) override;

void collectGarbage(const GCOptions & options, GCResults & results) override;

@@ -267,9 +267,9 @@ private:

void findRoots(const Path & path, unsigned char type, Roots & roots);

Roots findRootsNoTemp();
void findRootsNoTemp(Roots & roots, bool censor);

PathSet findRuntimeRoots();
void findRuntimeRoots(Roots & roots, bool censor);

void removeUnusedLinks(const GCState & state);

@@ -596,7 +596,7 @@ void RemoteStore::syncWithGC()
}

Roots RemoteStore::findRoots()
Roots RemoteStore::findRoots(bool censor)
{
auto conn(getConnection());
conn->to << wopFindRoots;

@@ -606,7 +606,7 @@ Roots RemoteStore::findRoots()
while (count--) {
Path link = readString(conn->from);
Path target = readStorePath(*this, conn->from);
result[link] = target;
result[target].emplace(link);
}
return result;
}

@@ -82,7 +82,7 @@ public:

void syncWithGC() override;

Roots findRoots() override;
Roots findRoots(bool censor) override;

void collectGarbage(const GCOptions & options, GCResults & results) override;

@@ -126,6 +126,7 @@ ref<Aws::Client::ClientConfiguration> S3Helper::makeConfig(const string & region
res->endpointOverride = endpoint;
}
res->requestTimeoutMs = 600 * 1000;
res->connectTimeoutMs = 5 * 1000;
res->retryStrategy = std::make_shared<RetryStrategy>();
res->caFile = settings.caFile;
return res;

@@ -842,12 +842,11 @@ namespace nix {

RegisterStoreImplementation::Implementations * RegisterStoreImplementation::implementations = 0;

ref<Store> openStore(const std::string & uri_,
const Store::Params & extraParams)
/* Split URI into protocol+hierarchy part and its parameter set. */
std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri_)
{
auto uri(uri_);
Store::Params params(extraParams);
Store::Params params;
auto q = uri.find('?');
if (q != std::string::npos) {
for (auto s : tokenizeString<Strings>(uri.substr(q + 1), "&")) {

@@ -873,6 +872,15 @@ ref<Store> openStore(const std::string & uri_,
}
uri = uri_.substr(0, q);
}
return {uri, params};
}

ref<Store> openStore(const std::string & uri_,
const Store::Params & extraParams)
{
auto [uri, uriParams] = splitUriAndParams(uri_);
auto params = extraParams;
params.insert(uriParams.begin(), uriParams.end());

for (auto fun : *RegisterStoreImplementation::implementations) {
auto store = fun(uri, params);

@@ -11,6 +11,8 @@
#include <atomic>
#include <limits>
#include <map>
#include <unordered_map>
#include <unordered_set>
#include <memory>
#include <string>

@@ -47,7 +49,7 @@ const size_t storePathHashLen = 32; // i.e. 160 bits
const uint32_t exportMagic = 0x4558494e;

typedef std::map<Path, Path> Roots;
typedef std::unordered_map<Path, std::unordered_set<std::string>> Roots;

struct GCOptions

@@ -483,8 +485,10 @@ public:

/* Find the roots of the garbage collector. Each root is a pair
(link, storepath) where `link' is the path of the symlink
outside of the Nix store that point to `storePath'. */
virtual Roots findRoots()
outside of the Nix store that point to `storePath'. If
'censor' is true, privacy-sensitive information about roots
found in /proc is censored. */
virtual Roots findRoots(bool censor)
{ unsupported("findRoots"); }

/* Perform a garbage collection. */

@@ -798,4 +802,8 @@ ValidPathInfo decodeValidPathInfo(std::istream & str,
for paths created by makeFixedOutputPath() / addToStore(). */
std::string makeFixedOutputCA(bool recursive, const Hash & hash);

/* Split URI into protocol+hierarchy part and its parameter set. */
std::pair<std::string, Store::Params> splitUriAndParams(const std::string & uri);

}

@@ -331,7 +331,7 @@ struct RestoreSink : ParseSink
filesystem doesn't support preallocation (e.g. on
OpenSolaris). Since preallocation is just an
optimisation, ignore it. */
if (errno && errno != EINVAL)
if (errno && errno != EINVAL && errno != EOPNOTSUPP && errno != ENOSYS)
throw SysError(format("preallocating file of %1% bytes") % len);
}
#endif

@@ -475,11 +475,19 @@ static void performOp(TunnelLogger * logger, ref<Store> store,

case wopFindRoots: {
logger->startWork();
Roots roots = store->findRoots();
Roots roots = store->findRoots(!trusted);
logger->stopWork();
to << roots.size();

size_t size = 0;
for (auto & i : roots)
to << i.first << i.second;
size += i.second.size();

to << size;

for (auto & [target, links] : roots)
for (auto & link : links)
to << link << target;

break;
}

@@ -427,10 +427,11 @@ static void opQuery(Strings opFlags, Strings opArgs)
maybeUseOutputs(store->followLinksToStorePath(i), useOutput, forceRealise),
referrers, true, settings.gcKeepOutputs, settings.gcKeepDerivations);
}
Roots roots = store->findRoots();
for (auto & i : roots)
if (referrers.find(i.second) != referrers.end())
cout << format("%1%\n") % i.first;
Roots roots = store->findRoots(false);
for (auto & [target, links] : roots)
if (referrers.find(target) != referrers.end())
for (auto & link : links)
cout << format("%1% -> %2%\n") % link % target;
break;
}

@@ -485,11 +486,16 @@ static void opReadLog(Strings opFlags, Strings opArgs)
static void opDumpDB(Strings opFlags, Strings opArgs)
{
if (!opFlags.empty()) throw UsageError("unknown flag");
if (!opArgs.empty())
throw UsageError("no arguments expected");
if (!opArgs.empty()) {
for (auto & i : opArgs)
i = store->followLinksToStorePath(i);
for (auto & i : opArgs)
cout << store->makeValidityRegistration({i}, true, true);
} else {
PathSet validPaths = store->queryAllValidPaths();
for (auto & i : validPaths)
cout << store->makeValidityRegistration({i}, true, true);
}
}

@@ -585,9 +591,14 @@ static void opGC(Strings opFlags, Strings opArgs)
if (!opArgs.empty()) throw UsageError("no arguments expected");

if (printRoots) {
Roots roots = store->findRoots();
for (auto & i : roots)
cout << i.first << " -> " << i.second << std::endl;
Roots roots = store->findRoots(false);
std::set<std::pair<Path, Path>> roots2;
// Transpose and sort the roots.
for (auto & [target, links] : roots)
for (auto & link : links)
roots2.emplace(link, target);
for (auto & [link, target] : roots2)
std::cout << link << " -> " << target << "\n";
}

else {

@@ -7,7 +7,7 @@ outPath=$(nix-store -rvv "$drvPath")
rm -f "$NIX_STATE_DIR"/gcroots/foo
ln -sf $outPath "$NIX_STATE_DIR"/gcroots/foo

[ "$(nix-store -q --roots $outPath)" = "$NIX_STATE_DIR"/gcroots/foo ]
[ "$(nix-store -q --roots $outPath)" = "$NIX_STATE_DIR/gcroots/foo -> $outPath" ]

nix-store --gc --print-roots | grep $outPath
nix-store --gc --print-live | grep $outPath

tests/lang/eval-okay-context-introspection.exp: new file, 1 addition

@@ -0,0 +1 @@
true

tests/lang/eval-okay-context-introspection.nix: new file, 24 additions

@@ -0,0 +1,24 @@
let
drv = derivation {
name = "fail";
builder = "/bin/false";
system = "x86_64-linux";
outputs = [ "out" "foo" ];
};

path = "${./eval-okay-context-introspection.nix}";

desired-context = {
"${builtins.unsafeDiscardStringContext path}" = {
path = true;
};
"${builtins.unsafeDiscardStringContext drv.drvPath}" = {
outputs = [ "foo" "out" ];
allOutputs = true;
};
};

legit-context = builtins.getContext "${path}${drv.outPath}${drv.foo.outPath}${drv.drvPath}";

constructed-context = builtins.getContext (builtins.appendContext "" desired-context);
in legit-context == constructed-context

@@ -20,6 +20,8 @@ with builtins;
(isFloat (1 - 2.0))
(isBool (true && false))
(isBool null)
(isPath /nix/store)
(isPath ./.)
(isAttrs { x = 123; })
(isAttrs null)
(typeOf (3 * 4))