Mirror of https://github.com/privatevoid-net/nix-super.git (synced 2024-11-26 15:56:18 +02:00)
Add support for passing structured data to builders
Previously, all derivation attributes had to be coerced into strings so
that they could be passed via the environment. This is lossy (e.g. lists
get flattened, necessitating configureFlags vs. configureFlagsArray, of
which the latter cannot be specified as an attribute), doesn't support
attribute sets at all, and has size limitations (necessitating hacks like
passAsFile).

This patch adds a new mode for passing attributes to builders, namely
encoded as a JSON file ".attrs.json" in the current directory of the
builder. This mode is activated via the special attribute

  __structuredAttrs = true;

(The idea is that one day we can set this in stdenv.mkDerivation.)

For example,

  stdenv.mkDerivation {
    __structuredAttrs = true;
    name = "foo";
    buildInputs = [ pkgs.hello pkgs.cowsay ];
    doCheck = true;
    hardening.format = false;
  }

results in a ".attrs.json" file containing (sans the indentation):

  {
    "buildInputs": [],
    "builder": "/nix/store/ygl61ycpr2vjqrx775l1r2mw1g2rb754-bash-4.3-p48/bin/bash",
    "configureFlags": [
      "--with-foo",
      "--with-bar=1 2"
    ],
    "doCheck": true,
    "hardening": {
      "format": false
    },
    "name": "foo",
    "nativeBuildInputs": [
      "/nix/store/10h6li26i7g6z3mdpvra09yyf10mmzdr-hello-2.10",
      "/nix/store/4jnvjin0r6wp6cv1hdm5jbkx3vinlcvk-cowsay-3.03"
    ],
    "propagatedBuildInputs": [],
    "propagatedNativeBuildInputs": [],
    "stdenv": "/nix/store/f3hw3p8armnzy6xhd4h8s7anfjrs15n2-stdenv",
    "system": "x86_64-linux"
  }

"passAsFile" is ignored in this mode because it's not needed: large
strings are included directly in the JSON representation.

It is up to the builder to do something with the JSON representation. For
example, in bash-based builders, lists/attrsets of string values could be
mapped to bash (associative) arrays.
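As an illustration of that last point, the sketch below shows one way a builder-side program could consume ".attrs.json". It is not part of this commit; nlohmann/json and the concrete attribute names are assumed purely for the example, and a bash-based stdenv would instead map the same values to (associative) arrays.

// Hypothetical builder-side consumer of ".attrs.json" (illustration only,
// not part of this commit). Assumes the nlohmann/json library.
#include <fstream>
#include <iostream>
#include <string>
#include <nlohmann/json.hpp>

int main()
{
    std::ifstream f(".attrs.json");
    nlohmann::json attrs = nlohmann::json::parse(f);

    // Scalars keep their JSON types instead of being coerced to strings.
    std::string name = attrs["name"].get<std::string>();
    bool doCheck = attrs.value("doCheck", false);

    // Lists arrive as real arrays, so no whitespace splitting is needed.
    for (auto & input : attrs["nativeBuildInputs"])
        std::cout << "native build input: " << input.get<std::string>() << "\n";

    // Nested attribute sets are ordinary JSON objects.
    if (attrs.contains("hardening") && !attrs["hardening"].value("format", true))
        std::cout << "format hardening disabled\n";

    std::cout << name << "\n";
    if (doCheck) std::cout << "checks enabled\n";
}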
parent 54801ed6ad
commit 6de33a9c67
4 changed files with 130 additions and 58 deletions
@@ -291,6 +291,8 @@ EvalState::EvalState(const Strings & _searchPath, ref<Store> store)
     , sToString(symbols.create("__toString"))
     , sRight(symbols.create("right"))
     , sWrong(symbols.create("wrong"))
+    , sStructuredAttrs(symbols.create("__structuredAttrs"))
+    , sBuilder(symbols.create("builder"))
     , store(store)
     , baseEnv(allocEnv(128))
     , staticBaseEnv(false, 0)
@@ -68,7 +68,7 @@ public:
     const Symbol sWith, sOutPath, sDrvPath, sType, sMeta, sName, sValue,
         sSystem, sOverrides, sOutputs, sOutputName, sIgnoreNulls,
         sFile, sLine, sColumn, sFunctor, sToString,
-        sRight, sWrong;
+        sRight, sWrong, sStructuredAttrs, sBuilder;
     Symbol sDerivationNix;
 
     /* If set, force copying files to the Nix store even if they
@@ -8,6 +8,7 @@
 #include "names.hh"
 #include "store-api.hh"
 #include "util.hh"
+#include "json.hh"
 #include "value-to-json.hh"
 #include "value-to-xml.hh"
 #include "primops.hh"
@@ -474,6 +475,13 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
         throw;
     }
 
+    /* Check whether attributes should be passed as a JSON file. */
+    std::ostringstream jsonBuf;
+    std::unique_ptr<JSONObject> jsonObject;
+    attr = args[0]->attrs->find(state.sStructuredAttrs);
+    if (attr != args[0]->attrs->end() && state.forceBool(*attr->value, pos))
+        jsonObject = std::make_unique<JSONObject>(jsonBuf);
+
     /* Check whether null attributes should be ignored. */
     bool ignoreNulls = false;
     attr = args[0]->attrs->find(state.sIgnoreNulls);
@@ -491,50 +499,20 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
     StringSet outputs;
     outputs.insert("out");
 
-    for (auto & i : *args[0]->attrs) {
-        if (i.name == state.sIgnoreNulls) continue;
-        string key = i.name;
+    for (auto & i : args[0]->attrs->lexicographicOrder()) {
+        if (i->name == state.sIgnoreNulls) continue;
+        string key = i->name;
         Activity act(*logger, lvlVomit, format("processing attribute ‘%1%’") % key);
 
-        try {
-
-            if (ignoreNulls) {
-                state.forceValue(*i.value);
-                if (i.value->type == tNull) continue;
-            }
-
-            /* The `args' attribute is special: it supplies the
-               command-line arguments to the builder. */
-            if (key == "args") {
-                state.forceList(*i.value, pos);
-                for (unsigned int n = 0; n < i.value->listSize(); ++n) {
-                    string s = state.coerceToString(posDrvName, *i.value->listElems()[n], context, true);
-                    drv.args.push_back(s);
-                }
-            }
-
-            /* All other attributes are passed to the builder through
-               the environment. */
-            else {
-                string s = state.coerceToString(posDrvName, *i.value, context, true);
-                drv.env[key] = s;
-                if (key == "builder") drv.builder = s;
-                else if (i.name == state.sSystem) drv.platform = s;
-                else if (i.name == state.sName) {
-                    drvName = s;
-                    printMsg(lvlVomit, format("derivation name is ‘%1%’") % drvName);
-                }
-                else if (key == "outputHash") outputHash = s;
-                else if (key == "outputHashAlgo") outputHashAlgo = s;
-                else if (key == "outputHashMode") {
+        auto handleHashMode = [&](const std::string & s) {
             if (s == "recursive") outputHashRecursive = true;
             else if (s == "flat") outputHashRecursive = false;
-                    else throw EvalError(format("invalid value ‘%1%’ for ‘outputHashMode’ attribute, at %2%") % s % posDrvName);
-                }
-                else if (key == "outputs") {
-                    Strings tmp = tokenizeString<Strings>(s);
+            else throw EvalError("invalid value ‘%s’ for ‘outputHashMode’ attribute, at %s", s, posDrvName);
+        };
+
+        auto handleOutputs = [&](const Strings & ss) {
             outputs.clear();
-                    for (auto & j : tmp) {
+            for (auto & j : ss) {
                 if (outputs.find(j) != outputs.end())
                     throw EvalError(format("duplicate derivation output ‘%1%’, at %2%") % j % posDrvName);
                 /* !!! Check whether j is a valid attribute
@@ -548,7 +526,73 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
             }
             if (outputs.empty())
                 throw EvalError(format("derivation cannot have an empty set of outputs, at %1%") % posDrvName);
+        };
+
+        try {
+
+            if (ignoreNulls) {
+                state.forceValue(*i->value);
+                if (i->value->type == tNull) continue;
+            }
+
+            /* The `args' attribute is special: it supplies the
+               command-line arguments to the builder. */
+            if (key == "args") {
+                state.forceList(*i->value, pos);
+                for (unsigned int n = 0; n < i->value->listSize(); ++n) {
+                    string s = state.coerceToString(posDrvName, *i->value->listElems()[n], context, true);
+                    drv.args.push_back(s);
+                }
+            }
+
+            /* All other attributes are passed to the builder through
+               the environment. */
+            else {
+
+                if (jsonObject) {
+
+                    if (i->name == state.sStructuredAttrs) continue;
+
+                    auto placeholder(jsonObject->placeholder(key));
+                    printValueAsJSON(state, true, *i->value, placeholder, context);
+
+                    if (i->name == state.sBuilder)
+                        drv.builder = state.forceString(*i->value, context, posDrvName);
+                    else if (i->name == state.sSystem)
+                        drv.platform = state.forceStringNoCtx(*i->value, posDrvName);
+                    else if (i->name == state.sName)
+                        drvName = state.forceStringNoCtx(*i->value, posDrvName);
+                    else if (key == "outputHash")
+                        outputHash = state.forceStringNoCtx(*i->value, posDrvName);
+                    else if (key == "outputHashAlgo")
+                        outputHashAlgo = state.forceStringNoCtx(*i->value, posDrvName);
+                    else if (key == "outputHashMode")
+                        handleHashMode(state.forceStringNoCtx(*i->value, posDrvName));
+                    else if (key == "outputs") {
+                        /* Require ‘outputs’ to be a list of strings. */
+                        state.forceList(*i->value, posDrvName);
+                        Strings ss;
+                        for (unsigned int n = 0; n < i->value->listSize(); ++n)
+                            ss.emplace_back(state.forceStringNoCtx(*i->value->listElems()[n], posDrvName));
+                        handleOutputs(ss);
+                    }
+
+                } else {
+                    auto s = state.coerceToString(posDrvName, *i->value, context, true);
+                    drv.env.emplace(key, s);
+                    if (i->name == state.sBuilder) drv.builder = s;
+                    else if (i->name == state.sSystem) drv.platform = s;
+                    else if (i->name == state.sName) {
+                        drvName = s;
+                        printMsg(lvlVomit, format("derivation name is ‘%1%’") % drvName);
+                    }
+                    else if (key == "outputHash") outputHash = s;
+                    else if (key == "outputHashAlgo") outputHashAlgo = s;
+                    else if (key == "outputHashMode") handleHashMode(s);
+                    else if (key == "outputs")
+                        handleOutputs(tokenizeString<Strings>(s));
+                }
+
             }
 
         } catch (Error & e) {
@@ -558,6 +602,11 @@ static void prim_derivationStrict(EvalState & state, const Pos & pos, Value * *
         }
     }
 
+    if (jsonObject) {
+        jsonObject.reset();
+        drv.env.emplace("__json", jsonBuf.str());
+    }
+
     /* Everything in the context of the strings in the derivation
        attributes should be added as dependencies of the resulting
        derivation. */
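For orientation only, here is a rough, self-contained sketch of what the jsonObject/placeholder machinery above amounts to: every attribute is serialized into a single JSON object, whose textual form ends up under the "__json" key of the derivation's environment. nlohmann/json and the hard-coded values stand in for Nix's json.hh writer and the evaluated attributes; none of this code is part of the commit.

// Illustrative sketch only, not Nix code: build one JSON object for the
// derivation and stash its serialization where the build machinery can
// find it -- the analogue of drv.env["__json"] above.
#include <map>
#include <string>
#include <nlohmann/json.hpp>

std::map<std::string, std::string> env;   // stand-in for drv.env

void addStructuredAttrs()
{
    nlohmann::json attrs = nlohmann::json::object();

    // prim_derivationStrict() fills one placeholder per attribute via
    // printValueAsJSON(); here the values are simply assigned.
    attrs["name"] = "foo";
    attrs["doCheck"] = true;
    attrs["buildInputs"] = nlohmann::json::array();
    attrs["hardening"] = { { "format", false } };

    env["__json"] = attrs.dump();
}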
@@ -865,6 +865,9 @@ private:
     /* Fill in the environment for the builder. */
     void initEnv();
 
+    /* Write a JSON file containing the derivation attributes. */
+    void writeStructuredAttrs();
+
     /* Make a file owned by the builder. */
     void chownToBuilder(const Path & path);
 
@@ -1726,13 +1729,15 @@ void DerivationGoal::startBuilder()
     tmpDirInSandbox = useChroot ? canonPath("/tmp", true) + "/nix-build-" + drvName + "-0" : tmpDir;
     chownToBuilder(tmpDir);
 
-    /* Construct the environment passed to the builder. */
-    initEnv();
-
     /* Substitute output placeholders with the actual output paths. */
     for (auto & output : drv->outputs)
         inputRewrites[hashPlaceholder(output.first)] = output.second.path;
 
+    /* Construct the environment passed to the builder. */
+    initEnv();
+
+    writeStructuredAttrs();
+
     /* Handle exportReferencesGraph(), if set. */
     doExportReferencesGraph();
 
@@ -2148,10 +2153,15 @@ void DerivationGoal::initEnv()
     /* The maximum number of cores to utilize for parallel building. */
     env["NIX_BUILD_CORES"] = (format("%d") % settings.buildCores).str();
 
-    /* Add all bindings specified in the derivation via the
-       environments, except those listed in the passAsFile
-       attribute. Those are passed as file names pointing to
-       temporary files containing the contents. */
+    /* In non-structured mode, add all bindings specified in the
+       derivation via the environments, except those listed in the
+       passAsFile attribute. Those are passed as file names pointing
+       to temporary files containing the contents. Note that
+       passAsFile is ignored in structure mode because it's not
+       needed (attributes are not passed through the environment, so
+       there is no size constraint). */
+    if (!drv->env.count("__json")) {
+
         StringSet passAsFile = tokenizeString<StringSet>(get(drv->env, "passAsFile"));
         int fileNr = 0;
         for (auto & i : drv->env) {
@@ -2166,6 +2176,8 @@ void DerivationGoal::initEnv()
             }
         }
 
+    }
+
     /* For convenience, set an environment pointing to the top build
        directory. */
     env["NIX_BUILD_TOP"] = tmpDirInSandbox;
@@ -2201,6 +2213,15 @@
 }
 
 
+void DerivationGoal::writeStructuredAttrs()
+{
+    auto json = drv->env.find("__json");
+    if (json == drv->env.end()) return;
+
+    writeFile(tmpDir + "/.attrs.json", rewriteStrings(json->second, inputRewrites));
+}
+
+
 void DerivationGoal::chownToBuilder(const Path & path)
 {
     if (!buildUser) return;
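A simplified, hypothetical sketch of what writeStructuredAttrs() above does: substitute the recorded output placeholders with the actual store paths, then write the result to ".attrs.json" in the build's temporary directory. The helper names and types below are invented for the sketch; the real code uses Nix's rewriteStrings() and writeFile().

// Hypothetical stand-in for the rewriteStrings() + writeFile() pair used by
// DerivationGoal::writeStructuredAttrs(); names and types are illustrative.
#include <fstream>
#include <map>
#include <string>

using StringMap = std::map<std::string, std::string>;

// Replace every occurrence of each placeholder with its actual store path.
static std::string rewritePlaceholders(std::string s, const StringMap & rewrites)
{
    for (const auto & [placeholder, storePath] : rewrites) {
        std::string::size_type pos = 0;
        while ((pos = s.find(placeholder, pos)) != std::string::npos) {
            s.replace(pos, placeholder.size(), storePath);
            pos += storePath.size();
        }
    }
    return s;
}

static void writeAttrsJson(const std::string & tmpDir,
    const std::string & jsonEnv, const StringMap & inputRewrites)
{
    std::ofstream out(tmpDir + "/.attrs.json");
    out << rewritePlaceholders(jsonEnv, inputRewrites);
}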