commit c6b5503190
42 changed files with 1045 additions and 44 deletions
doc/manual/rl-next/repl-doc-renders-doc-comments.md (new file, 53 lines)
@ -0,0 +1,53 @@
---
synopsis: "`nix-repl`'s `:doc` shows documentation comments"
significance: significant
issues:
- 3904
- 10771
prs:
- 1652
- 9054
- 11072
---

`nix repl` has a `:doc` command that previously only rendered documentation for internally defined functions.
This feature has been extended to also render function documentation comments, in accordance with [RFC 145].

Example:

```
nix-repl> :doc lib.toFunction
Function toFunction
… defined at /home/user/h/nixpkgs/lib/trivial.nix:1072:5

Turns any non-callable values into constant functions. Returns
callable values as is.

Inputs

v

: Any value

Examples

:::{.example}

## lib.trivial.toFunction usage example

| nix-repl> lib.toFunction 1 2
| 1
|
| nix-repl> lib.toFunction (x: x + 1) 2
| 3

:::
```

Known limitations:
- It does not render documentation for "formals", such as `{ /** the value to return */ x, ... }: x`.
- Some extensions to markdown are not yet supported, as you can see in the example above.

We'd like to acknowledge Yingchi Long for proposing a proof of concept for this functionality in [#9054](https://github.com/NixOS/nix/pull/9054), as well as @sternenseemann and Johannes Kirschbauer for their contributions, proposals, and their work on [RFC 145].

[RFC 145]: https://github.com/NixOS/rfcs/pull/145
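For readers who have not written RFC 145 documentation comments before, here is a minimal sketch (file and attribute names are illustrative, not part of this changeset) of the kind of comment the extended `:doc` picks up:

```nix
# sketch.nix — hypothetical example for illustration only
{
  /**
    Turn a name into a friendly greeting, e.g. `greet "world"` => `"Hello, world!"`.
  */
  greet = name: "Hello, " + name + "!";
}
```

After `:l sketch.nix` in `nix repl`, `:doc greet` renders the comment, in the same way the `lib.toFunction` example above is rendered.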
@ -91,7 +91,7 @@ A environment variables that Google Test accepts are also worth knowing:
Putting the two together, one might run

```bash
GTEST_BREIF=1 GTEST_FILTER='ErrorTraceTest.*' meson test nix-expr-tests -v
GTEST_BRIEF=1 GTEST_FILTER='ErrorTraceTest.*' meson test nix-expr-tests -v
```

for short but comprehensive output.
@ -73,7 +73,7 @@ static int listPossibleCallback(char * s, char *** avp)
{
auto possible = curRepl->completePrefix(s);

if (possible.size() > (INT_MAX / sizeof(char *)))
if (possible.size() > (std::numeric_limits<int>::max() / sizeof(char *)))
throw Error("too many completions");

int ac = 0;
@ -27,6 +27,7 @@
#include "local-fs-store.hh"
#include "print.hh"
#include "ref.hh"
#include "value.hh"

#if HAVE_BOEHMGC
#define GC_INCLUDE_NEW
@ -616,6 +617,38 @@ ProcessLineResult NixRepl::processLine(std::string line)

else if (command == ":doc") {
Value v;

auto expr = parseString(arg);
std::string fallbackName;
PosIdx fallbackPos;
DocComment fallbackDoc;
if (auto select = dynamic_cast<ExprSelect *>(expr)) {
Value vAttrs;
auto name = select->evalExceptFinalSelect(*state, *env, vAttrs);
fallbackName = state->symbols[name];

state->forceAttrs(vAttrs, noPos, "while evaluating an attribute set to look for documentation");
auto attrs = vAttrs.attrs();
assert(attrs);
auto attr = attrs->get(name);
if (!attr) {
// When missing, trigger the normal exception
// e.g. :doc builtins.foo
// behaves like
// nix-repl> builtins.foo
// error: attribute 'foo' missing
evalString(arg, v);
assert(false);
}
if (attr->pos) {
fallbackPos = attr->pos;
fallbackDoc = state->getDocCommentForPos(fallbackPos);
}

} else {
evalString(arg, v);
}

evalString(arg, v);
if (auto doc = state->getDoc(v)) {
std::string markdown;

@ -633,6 +666,19 @@ ProcessLineResult NixRepl::processLine(std::string line)
markdown += stripIndentation(doc->doc);

logger->cout(trim(renderMarkdownToTerminal(markdown)));
} else if (fallbackPos) {
std::stringstream ss;
ss << "Attribute `" << fallbackName << "`\n\n";
ss << " … defined at " << state->positions[fallbackPos] << "\n\n";
if (fallbackDoc) {
ss << fallbackDoc.getInnerText(state->positions);
} else {
ss << "No documentation found.\n\n";
}

auto markdown = ss.str();
logger->cout(trim(renderMarkdownToTerminal(markdown)));

} else
throw Error("value does not have documentation");
}
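For orientation: the `fallbackPos`/`fallbackDoc` branch above is what lets `:doc` describe plain (non-function) attributes. A minimal case, taken from `tests/functional/repl/doc-comments.nix` later in this changeset:

```nix
{
  /** Immovably fixed. */
  lib.version = "9000";
}
```

`getDoc` finds nothing on the string value itself, so the repl falls back to the attribute's definition position and renders `Attribute version`, its location, and `Immovably fixed.` (compare `doc-constant.expected`).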
@ -559,6 +559,54 @@ std::optional<EvalState::Doc> EvalState::getDoc(Value & v)
.doc = doc,
};
}
if (v.isLambda()) {
auto exprLambda = v.payload.lambda.fun;

std::stringstream s(std::ios_base::out);
std::string name;
auto pos = positions[exprLambda->getPos()];
std::string docStr;

if (exprLambda->name) {
name = symbols[exprLambda->name];
}

if (exprLambda->docComment) {
docStr = exprLambda->docComment.getInnerText(positions);
}

if (name.empty()) {
s << "Function ";
}
else {
s << "Function `" << name << "`";
if (pos)
s << "\\\n … " ;
else
s << "\\\n";
}
if (pos) {
s << "defined at " << pos;
}
if (!docStr.empty()) {
s << "\n\n";
}

s << docStr;

s << '\0'; // for making a c string below
std::string ss = s.str();

return Doc {
.pos = pos,
.name = name,
.arity = 0, // FIXME: figure out how deep by syntax only? It's not semantically useful though...
.args = {},
.doc =
// FIXME: this leaks; make the field std::string?
strdup(ss.data()),
};
}
return {};
}
@ -1367,6 +1415,22 @@ void ExprSelect::eval(EvalState & state, Env & env, Value & v)
v = *vAttrs;
}

Symbol ExprSelect::evalExceptFinalSelect(EvalState & state, Env & env, Value & attrs)
{
Value vTmp;
Symbol name = getName(attrPath[attrPath.size() - 1], state, env);

if (attrPath.size() == 1) {
e->eval(state, env, vTmp);
} else {
ExprSelect init(*this);
init.attrPath.pop_back();
init.eval(state, env, vTmp);
}
attrs = vTmp;
return name;
}


void ExprOpHasAttr::eval(EvalState & state, Env & env, Value & v)
{
@ -2828,13 +2892,37 @@ Expr * EvalState::parse(
const SourcePath & basePath,
std::shared_ptr<StaticEnv> & staticEnv)
{
auto result = parseExprFromBuf(text, length, origin, basePath, symbols, settings, positions, rootFS, exprSymbols);
DocCommentMap tmpDocComments; // Only used when origin is not a SourcePath
DocCommentMap *docComments = &tmpDocComments;

if (auto sourcePath = std::get_if<SourcePath>(&origin)) {
auto [it, _] = positionToDocComment.try_emplace(*sourcePath);
docComments = &it->second;
}

auto result = parseExprFromBuf(text, length, origin, basePath, symbols, settings, positions, *docComments, rootFS, exprSymbols);

result->bindVars(*this, staticEnv);

return result;
}

DocComment EvalState::getDocCommentForPos(PosIdx pos)
{
auto pos2 = positions[pos];
auto path = pos2.getSourcePath();
if (!path)
return {};

auto table = positionToDocComment.find(*path);
if (table == positionToDocComment.end())
return {};

auto it = table->second.find(pos);
if (it == table->second.end())
return {};
return it->second;
}

std::string ExternalValueBase::coerceToString(EvalState & state, const PosIdx & pos, NixStringContext & context, bool copyMore, bool copyToStore) const
{
@ -130,6 +130,8 @@ struct Constant
typedef std::map<std::string, Value *> ValMap;
#endif

typedef std::map<PosIdx, DocComment> DocCommentMap;

struct Env
{
Env * up;
@ -329,6 +331,12 @@ private:
#endif
FileEvalCache fileEvalCache;

/**
 * Associate source positions of certain AST nodes with their preceding doc comment, if they have one.
 * Grouped by file.
 */
std::map<SourcePath, DocCommentMap> positionToDocComment;

LookupPath lookupPath;

std::map<std::string, std::optional<std::string>> lookupPathResolved;
@ -771,6 +779,8 @@ public:
std::string_view pathArg,
PosIdx pos);

DocComment getDocCommentForPos(PosIdx pos);

private:

/**
src/libexpr/lexer-helpers.cc (new file, 30 lines)
@ -0,0 +1,30 @@
#include "lexer-tab.hh"
#include "lexer-helpers.hh"
#include "parser-tab.hh"

void nix::lexer::internal::initLoc(YYLTYPE * loc)
{
loc->beginOffset = loc->endOffset = 0;
}

void nix::lexer::internal::adjustLoc(yyscan_t yyscanner, YYLTYPE * loc, const char * s, size_t len)
{
loc->stash();

LexerState & lexerState = *yyget_extra(yyscanner);

if (lexerState.docCommentDistance == 1) {
// Preceding token was a doc comment.
ParserLocation doc;
doc.beginOffset = lexerState.lastDocCommentLoc.beginOffset;
ParserLocation docEnd;
docEnd.beginOffset = lexerState.lastDocCommentLoc.endOffset;
DocComment docComment{lexerState.at(doc), lexerState.at(docEnd)};
PosIdx locPos = lexerState.at(*loc);
lexerState.positionToDocComment.emplace(locPos, docComment);
}
lexerState.docCommentDistance++;

loc->beginOffset = loc->endOffset;
loc->endOffset += len;
}
src/libexpr/lexer-helpers.hh (new file, 9 lines)
@ -0,0 +1,9 @@
#pragma once

namespace nix::lexer::internal {

void initLoc(YYLTYPE * loc);

void adjustLoc(yyscan_t yyscanner, YYLTYPE * loc, const char * s, size_t len);

} // namespace nix::lexer
@ -5,7 +5,7 @@
%option stack
%option nodefault
%option nounput noyy_top_state

%option extra-type="::nix::LexerState *"

%s DEFAULT
%x STRING

@ -14,6 +14,10 @@
%x INPATH_SLASH
%x PATH_START

%top {
#include "parser-tab.hh" // YYSTYPE
#include "parser-state.hh"
}

%{
#ifdef __clang__
@ -22,28 +26,19 @@

#include "nixexpr.hh"
#include "parser-tab.hh"
#include "lexer-helpers.hh"

namespace nix {
struct LexerState;
}

using namespace nix;
using namespace nix::lexer::internal;

namespace nix {

#define CUR_POS state->at(*yylloc)

static void initLoc(YYLTYPE * loc)
{
loc->first_line = loc->last_line = 0;
loc->first_column = loc->last_column = 0;
}

static void adjustLoc(YYLTYPE * loc, const char * s, size_t len)
{
loc->stash();

loc->first_column = loc->last_column;
loc->last_column += len;
}


// we make use of the fact that the parser receives a private copy of the input
// string and can munge around in it.
static StringToken unescapeStr(SymbolTable & symbols, char * s, size_t length)

@ -79,7 +74,7 @@ static StringToken unescapeStr(SymbolTable & symbols, char * s, size_t length)
#pragma GCC diagnostic ignored "-Wimplicit-fallthrough"

#define YY_USER_INIT initLoc(yylloc)
#define YY_USER_ACTION adjustLoc(yylloc, yytext, yyleng);
#define YY_USER_ACTION adjustLoc(yyscanner, yylloc, yytext, yyleng);

#define PUSH_STATE(state) yy_push_state(state, yyscanner)
#define POP_STATE() yy_pop_state(yyscanner)
@ -279,9 +274,32 @@ or { return OR_KW; }
{SPATH} { yylval->path = {yytext, (size_t) yyleng}; return SPATH; }
{URI} { yylval->uri = {yytext, (size_t) yyleng}; return URI; }

[ \t\r\n]+ /* eat up whitespace */
\#[^\r\n]* /* single-line comments */
\/\*([^*]|\*+[^*/])*\*+\/ /* long comments */
%{
// Doc comment rule
//
// \/\*\*                   /**
// [^/*]                    reject /**/ (empty comment) and /***
// ([^*]|\*+[^*/])*\*+\/    same as the long comment rule
// ( )*                     zero or more non-ending sequences
// \*                       end(1)
// \/                       end(2)
%}
\/\*\*[^/*]([^*]|\*+[^*/])*\*+\/ /* doc comments */ {
LexerState & lexerState = *yyget_extra(yyscanner);
lexerState.docCommentDistance = 0;
lexerState.lastDocCommentLoc.beginOffset = yylloc->beginOffset;
lexerState.lastDocCommentLoc.endOffset = yylloc->endOffset;
}


%{
// The following rules have docCommentDistance--
// This compensates for the docCommentDistance++ which happens by default to
// make all the other rules invalidate the doc comment.
%}
[ \t\r\n]+ /* eat up whitespace */ { yyget_extra(yyscanner)->docCommentDistance--; }
\#[^\r\n]* /* single-line comments */ { yyget_extra(yyscanner)->docCommentDistance--; }
\/\*([^*]|\*+[^*/])*\*+\/ /* long comments */ { yyget_extra(yyscanner)->docCommentDistance--; }

{ANY} {
/* Don't return a negative number, as this will cause
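A rough Nix-level reading of the doc-comment rule above (the attribute names here are illustrative): only `/** … */` comments with at least one character after the second asterisk are recorded; ordinary long comments and `#` comments are not, and their rules merely keep an earlier doc comment alive by decrementing the distance counter again.

```nix
{
  /** Recognised as a doc comment. */
  documented = x: x;

  /* An ordinary long comment: not recorded. */
  plain = x: x;

  /*** Rejected by the [^/*] in the rule, so treated as an ordinary comment. */
  alsoPlain = x: x;
}
```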
@ -139,6 +139,7 @@ sources = files(
'function-trace.cc',
'get-drvs.cc',
'json-to-value.cc',
'lexer-helpers.cc',
'nixexpr.cc',
'paths.cc',
'primops.cc',

@ -165,6 +166,7 @@ headers = [config_h] + files(
'gc-small-vector.hh',
'get-drvs.hh',
'json-to-value.hh',
# internal: 'lexer-helpers.hh',
'nixexpr.hh',
'parser-state.hh',
'pos-idx.hh',
@ -583,6 +583,22 @@ std::string ExprLambda::showNamePos(const EvalState & state) const
return fmt("%1% at %2%", id, state.positions[pos]);
}

void ExprLambda::setDocComment(DocComment docComment) {
// RFC 145 specifies that the innermost doc comment wins.
// See https://github.com/NixOS/rfcs/blob/master/rfcs/0145-doc-strings.md#ambiguous-placement
if (!this->docComment) {
this->docComment = docComment;

// Curried functions are defined by putting a function directly
// in the body of another function. To render docs for those, we
// need to propagate the doc comment to the innermost function.
//
// If we have our own comment, we've already propagated it, so this
// belongs in the same conditional.
body->setDocComment(docComment);
}
};


/* Position table. */

@ -627,4 +643,22 @@ size_t SymbolTable::totalSize() const
return n;
}

std::string DocComment::getInnerText(const PosTable & positions) const {
auto beginPos = positions[begin];
auto endPos = positions[end];
auto docCommentStr = beginPos.getSnippetUpTo(endPos).value_or("");

// Strip "/**" and "*/"
constexpr size_t prefixLen = 3;
constexpr size_t suffixLen = 2;
std::string docStr = docCommentStr.substr(prefixLen, docCommentStr.size() - prefixLen - suffixLen);
if (docStr.empty())
return {};
// Turn the now missing "/**" into indentation
docStr = "   " + docStr;
// Strip indentation (for the whole, potentially multi-line string)
docStr = stripIndentation(docStr);
return docStr;
}

}
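A Nix-level illustration of the behaviour implemented above, condensed from the `unambiguous` and `curriedArgs` cases in `tests/functional/repl/doc-comments.nix`:

```nix
{
  /** Ignore!!! */
  unambiguous =
    /** Very close */
    x: x;

  /** A documented function. */
  curriedArgs =
    x:
    /** The function returned by applying once */
    y: x + y;
}
```

`:doc unambiguous` shows only `Very close` (the innermost comment wins), while `:doc curriedArgs` shows `A documented function.`, and documenting a partial application (`x = curriedArgs 1`, then `:doc x`) picks up the inner comment, as exercised by `doc-comment-curried-args.in`.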
@ -11,13 +11,53 @@

namespace nix {


struct Env;
struct Value;
class EvalState;
class PosTable;
struct Env;
struct ExprWith;
struct StaticEnv;
struct Value;

/**
 * A documentation comment, in the sense of [RFC 145](https://github.com/NixOS/rfcs/blob/master/rfcs/0145-doc-strings.md)
 *
 * Note that this does not implement the following:
 * - argument attribute names ("formals"): TBD
 * - argument names: these are internal to the function and their names may not be optimal for documentation
 * - function arity (degree of currying or number of ':'s):
 *   - Functions returning partially applied functions have a higher arity
 *     than can be determined locally and without evaluation.
 *     We do not want to present false data.
 *   - Some functions should be thought of as transformations of other
 *     functions. For instance `overlay -> overlay -> overlay` is the simplest
 *     way to understand `composeExtensions`, but its implementation looks like
 *     `f: g: final: prev: <...>`. The parameters `final` and `prev` are part
 *     of the overlay concept, while distracting from the function's purpose.
 */
struct DocComment {

/**
 * Start of the comment, including the opening, ie `/` and `**`.
 */
PosIdx begin;

/**
 * Position right after the final asterisk and `/` that terminate the comment.
 */
PosIdx end;

/**
 * Whether the comment is set.
 *
 * A `DocComment` is small enough that it makes sense to pass by value, and
 * therefore baking optionality into it is also useful, to avoid the memory
 * overhead of `std::optional`.
 */
operator bool() const { return static_cast<bool>(begin); }

std::string getInnerText(const PosTable & positions) const;

};

/**
 * An attribute path is a sequence of attribute names.
@ -54,6 +94,7 @@ struct Expr
virtual void eval(EvalState & state, Env & env, Value & v);
virtual Value * maybeThunk(EvalState & state, Env & env);
virtual void setName(Symbol name);
virtual void setDocComment(DocComment docComment) { };
virtual PosIdx getPos() const { return noPos; }
};

@ -156,6 +197,17 @@ struct ExprSelect : Expr
ExprSelect(const PosIdx & pos, Expr * e, AttrPath attrPath, Expr * def) : pos(pos), e(e), def(def), attrPath(std::move(attrPath)) { };
ExprSelect(const PosIdx & pos, Expr * e, Symbol name) : pos(pos), e(e), def(0) { attrPath.push_back(AttrName(name)); };
PosIdx getPos() const override { return pos; }

/**
 * Evaluate the `a.b.c` part of `a.b.c.d`. This exists mostly for the purpose of :doc in the repl.
 *
 * @param[out] v The attribute set that should contain the last attribute name (if it exists).
 * @return The last attribute name in `attrPath`
 *
 * @note This does *not* evaluate the final attribute, and does not fail if that's the only attribute that does not exist.
 */
Symbol evalExceptFinalSelect(EvalState & state, Env & env, Value & attrs);

COMMON_METHODS
};

@ -278,6 +330,8 @@ struct ExprLambda : Expr
Symbol arg;
Formals * formals;
Expr * body;
DocComment docComment;

ExprLambda(PosIdx pos, Symbol arg, Formals * formals, Expr * body)
: pos(pos), arg(arg), formals(formals), body(body)
{

@ -290,6 +344,7 @@ struct ExprLambda : Expr
std::string showNamePos(const EvalState & state) const;
inline bool hasFormals() const { return formals != nullptr; }
PosIdx getPos() const override { return pos; }
virtual void setDocComment(DocComment docComment) override;
COMMON_METHODS
};

@ -1,6 +1,8 @@
#pragma once
///@file

#include <limits>

#include "eval.hh"

namespace nix {
@ -20,25 +22,59 @@ struct StringToken

struct ParserLocation
{
int first_line, first_column;
int last_line, last_column;
int beginOffset;
int endOffset;

// backup to recover from yyless(0)
int stashed_first_column, stashed_last_column;
int stashedBeginOffset, stashedEndOffset;

void stash() {
stashed_first_column = first_column;
stashed_last_column = last_column;
stashedBeginOffset = beginOffset;
stashedEndOffset = endOffset;
}

void unstash() {
first_column = stashed_first_column;
last_column = stashed_last_column;
beginOffset = stashedBeginOffset;
endOffset = stashedEndOffset;
}

/** Latest doc comment position, or 0. */
int doc_comment_first_column, doc_comment_last_column;
};

struct LexerState
{
/**
 * Tracks the distance to the last doc comment, in terms of lexer tokens.
 *
 * The lexer sets this to 0 when reading a doc comment, and increments it
 * for every matched rule; see `lexer-helpers.cc`.
 * Whitespace and comment rules decrement the distance, so that they result
 * in a net 0 change in distance.
 */
int docCommentDistance = std::numeric_limits<int>::max();

/**
 * The location of the last doc comment.
 *
 * (stashing fields are not used)
 */
ParserLocation lastDocCommentLoc;

/**
 * @brief Maps some positions to a DocComment, where the comment is relevant to the location.
 */
std::map<PosIdx, DocComment> & positionToDocComment;

PosTable & positions;
PosTable::Origin origin;

PosIdx at(const ParserLocation & loc);
};

struct ParserState
{
const LexerState & lexerState;
SymbolTable & symbols;
PosTable & positions;
Expr * result;
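A small Nix sketch of what this distance bookkeeping buys (the first binding is the `floatedIn` case from the test data; the second is a hypothetical counter-example): whitespace and ordinary comments between a doc comment and the next token keep the association alive, while any other intervening token drops it.

```nix
{
  floatedIn = /** This also works. */
    x: y: x;

  # Hypothetical: the literal 1 is the token that follows the comment,
  # so the lambda after it is left undocumented.
  notAttached = let ignored = /** Lost. */ 1; in x: x;
}
```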
@ -270,9 +306,14 @@ inline Expr * ParserState::stripIndentation(const PosIdx pos,
return new ExprConcatStrings(pos, true, es2);
}

inline PosIdx LexerState::at(const ParserLocation & loc)
{
return positions.add(origin, loc.beginOffset);
}

inline PosIdx ParserState::at(const ParserLocation & loc)
{
return positions.add(origin, loc.first_column);
return positions.add(origin, loc.beginOffset);
}

}
@ -31,8 +31,25 @@
#define YY_DECL int yylex \
(YYSTYPE * yylval_param, YYLTYPE * yylloc_param, yyscan_t yyscanner, nix::ParserState * state)

// For efficiency, we only track offsets; not line,column coordinates
# define YYLLOC_DEFAULT(Current, Rhs, N) \
do \
if (N) \
{ \
(Current).beginOffset = YYRHSLOC (Rhs, 1).beginOffset; \
(Current).endOffset = YYRHSLOC (Rhs, N).endOffset; \
} \
else \
{ \
(Current).beginOffset = (Current).endOffset = \
YYRHSLOC (Rhs, 0).endOffset; \
} \
while (0)

namespace nix {

typedef std::map<PosIdx, DocComment> DocCommentMap;

Expr * parseExprFromBuf(
char * text,
size_t length,

@ -41,6 +58,7 @@ Expr * parseExprFromBuf(
SymbolTable & symbols,
const EvalSettings & settings,
PosTable & positions,
DocCommentMap & docComments,
const ref<SourceAccessor> rootFS,
const Expr::AstSymbols & astSymbols);

@ -65,8 +83,7 @@ using namespace nix;
void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char * error)
{
if (std::string_view(error).starts_with("syntax error, unexpected end of file")) {
loc->first_column = loc->last_column;
loc->first_line = loc->last_line;
loc->beginOffset = loc->endOffset;
}
throw ParseError({
.msg = HintFmt(error),

@ -74,6 +91,14 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char *
});
}

#define SET_DOC_POS(lambda, pos) setDocPosition(state->lexerState, lambda, state->at(pos))
static void setDocPosition(const LexerState & lexerState, ExprLambda * lambda, PosIdx start) {
auto it = lexerState.positionToDocComment.find(start);
if (it != lexerState.positionToDocComment.end()) {
lambda->setDocComment(it->second);
}
}


%}

@ -119,6 +144,7 @@ void yyerror(YYLTYPE * loc, yyscan_t scanner, ParserState * state, const char *
%token IND_STRING_OPEN IND_STRING_CLOSE
%token ELLIPSIS


%right IMPL
%left OR
%left AND

@ -140,18 +166,28 @@ expr: expr_function;

expr_function
: ID ':' expr_function
{ $$ = new ExprLambda(CUR_POS, state->symbols.create($1), 0, $3); }
{ auto me = new ExprLambda(CUR_POS, state->symbols.create($1), 0, $3);
$$ = me;
SET_DOC_POS(me, @1);
}
| '{' formals '}' ':' expr_function
{ $$ = new ExprLambda(CUR_POS, state->validateFormals($2), $5); }
{ auto me = new ExprLambda(CUR_POS, state->validateFormals($2), $5);
$$ = me;
SET_DOC_POS(me, @1);
}
| '{' formals '}' '@' ID ':' expr_function
{
auto arg = state->symbols.create($5);
$$ = new ExprLambda(CUR_POS, arg, state->validateFormals($2, CUR_POS, arg), $7);
auto me = new ExprLambda(CUR_POS, arg, state->validateFormals($2, CUR_POS, arg), $7);
$$ = me;
SET_DOC_POS(me, @1);
}
| ID '@' '{' formals '}' ':' expr_function
{
auto arg = state->symbols.create($1);
$$ = new ExprLambda(CUR_POS, arg, state->validateFormals($4, CUR_POS, arg), $7);
auto me = new ExprLambda(CUR_POS, arg, state->validateFormals($4, CUR_POS, arg), $7);
$$ = me;
SET_DOC_POS(me, @1);
}
| ASSERT expr ';' expr_function
{ $$ = new ExprAssert(CUR_POS, $2, $4); }
@ -312,7 +348,22 @@ ind_string_parts
;

binds
: binds attrpath '=' expr ';' { $$ = $1; state->addAttr($$, std::move(*$2), $4, state->at(@2)); delete $2; }
: binds attrpath '=' expr ';' {
$$ = $1;

auto pos = state->at(@2);
auto exprPos = state->at(@4);
{
auto it = state->lexerState.positionToDocComment.find(pos);
if (it != state->lexerState.positionToDocComment.end()) {
$4->setDocComment(it->second);
state->lexerState.positionToDocComment.emplace(exprPos, it->second);
}
}

state->addAttr($$, std::move(*$2), $4, pos);
delete $2;
}
| binds INHERIT attrs ';'
{ $$ = $1;
for (auto & [i, iPos] : *$3) {
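In Nix terms, the extended `binds` rule above is what makes a comment written before an attribute binding document the bound value. Condensed from the test data:

```nix
{
  /** Perform *arithmetic* multiplication. */
  multiply = x: y: x * y;
}
```

The comment precedes the attrpath, so the parser attaches it to the bound lambda via `setDocComment` and additionally records it for the expression's position; `:doc multiply` after `:l doc-comments.nix` then renders it (see `doc-multiply.expected`).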
@ -431,21 +482,28 @@ Expr * parseExprFromBuf(
SymbolTable & symbols,
const EvalSettings & settings,
PosTable & positions,
DocCommentMap & docComments,
const ref<SourceAccessor> rootFS,
const Expr::AstSymbols & astSymbols)
{
yyscan_t scanner;
LexerState lexerState {
.positionToDocComment = docComments,
.positions = positions,
.origin = positions.addOrigin(origin, length),
};
ParserState state {
.lexerState = lexerState,
.symbols = symbols,
.positions = positions,
.basePath = basePath,
.origin = positions.addOrigin(origin, length),
.origin = lexerState.origin,
.rootFS = rootFS,
.s = astSymbols,
.settings = settings,
};

yylex_init(&scanner);
yylex_init_extra(&lexerState, &scanner);
Finally _destroy([&] { yylex_destroy(scanner); });

yy_scan_buffer(text, length, scanner);
@ -110,4 +110,50 @@ void Pos::LinesIterator::bump(bool atFirst)
input.remove_prefix(eol);
}

std::optional<std::string> Pos::getSnippetUpTo(const Pos & end) const {
assert(this->origin == end.origin);

if (end.line < this->line)
return std::nullopt;

if (auto source = getSource()) {

auto firstLine = LinesIterator(*source);
for (auto i = 1; i < this->line; ++i) {
++firstLine;
}

auto lastLine = LinesIterator(*source);
for (auto i = 1; i < end.line; ++i) {
++lastLine;
}

LinesIterator linesEnd;

std::string result;
for (auto i = firstLine; i != linesEnd; ++i) {
auto firstColumn = i == firstLine ? (this->column ? this->column - 1 : 0) : 0;
if (firstColumn > i->size())
firstColumn = i->size();

auto lastColumn = i == lastLine ? (end.column ? end.column - 1 : 0) : std::numeric_limits<int>::max();
if (lastColumn < firstColumn)
lastColumn = firstColumn;
if (lastColumn > i->size())
lastColumn = i->size();

result += i->substr(firstColumn, lastColumn - firstColumn);

if (i == lastLine) {
break;
} else {
result += '\n';
}
}
return result;
}
return std::nullopt;
}


}
@ -7,6 +7,7 @@

#include <cstdint>
#include <string>
#include <variant>

#include "source-path.hh"

@ -63,6 +64,15 @@ struct Pos
bool operator==(const Pos & rhs) const = default;
auto operator<=>(const Pos & rhs) const = default;

std::optional<std::string> getSnippetUpTo(const Pos & end) const;

/**
 * Get the SourcePath, if the source was loaded from a file.
 */
std::optional<SourcePath> getSourcePath() const {
return *std::get_if<SourcePath>(&origin);
}

struct LinesIterator {
using difference_type = size_t;
using value_type = std::string_view;
@ -63,7 +63,7 @@ function characterisationTestExit() {
echo ''
echo 'You can rerun this test with:'
echo ''
echo " _NIX_TEST_ACCEPT=1 make tests/functional/${TEST_NAME}.test"
echo " _NIX_TEST_ACCEPT=1 make tests/functional/${TEST_NAME}.sh.test"
echo ''
echo 'to regenerate the files containing the expected output,'
echo 'and then view the git diff to decide whether a change is'
@ -1,6 +1,7 @@
#!/usr/bin/env bash

source common.sh
source characterisation/framework.sh

testDir="$PWD"
cd "$TEST_ROOT"

@ -244,3 +245,42 @@ testReplResponseNoRegex '
y = { a = 1 };
}
'

# TODO: move init to characterisation/framework.sh
badDiff=0
badExitCode=0

nixVersion="$(nix eval --impure --raw --expr 'builtins.nixVersion' --extra-experimental-features nix-command)"

runRepl () {

# That is right, we are also filtering out the testdir _without underscores_.
# This is crazy, but without it, GHA will fail to run the tests, showing paths
# _with_ underscores in the set -x log, but _without_ underscores in the
# supposed nix repl output. I have looked in a number of places, but I cannot
# find a mechanism that could cause this to happen.
local testDirNoUnderscores
testDirNoUnderscores="${testDir//_/}"

# TODO: pass arguments to nix repl; see lang.sh
nix repl 2>&1 \
| stripColors \
| sed \
-e "s@$testDir@/path/to/tests/functional@g" \
-e "s@$testDirNoUnderscores@/path/to/tests/functional@g" \
-e "s@$nixVersion@<nix version>@g" \
-e "s@Added [0-9]* variables@Added <number omitted> variables@g" \
| grep -vF $'warning: you don\'t have Internet access; disabling some network-dependent features' \
;
}

for test in $(cd "$testDir/repl"; echo *.in); do
test="$(basename "$test" .in)"
in="$testDir/repl/$test.in"
actual="$testDir/repl/$test.actual"
expected="$testDir/repl/$test.expected"
(cd "$testDir/repl"; set +x; runRepl 2>&1) < "$in" > "$actual"
diffAndAcceptInner "$test" "$actual" "$expected"
done

characterisationTestExit
tests/functional/repl/characterisation/empty (new empty file)
tests/functional/repl/doc-comment-curried-args.expected (new file, 24 lines)
@ -0,0 +1,24 @@
Nix <nix version>
Type :? for help.
Added <number omitted> variables.

Function curriedArgs
… defined at
/path/to/tests/functional/repl/doc-comments.nix:48:5

A documented function.


"Note that users may not expect this to behave as it currently does"

Function curriedArgs
… defined at
/path/to/tests/functional/repl/doc-comments.nix:50:5

The function returned by applying once

"This won't produce documentation, because we can't actually document arbitrary values"

error: value does not have documentation
tests/functional/repl/doc-comment-curried-args.in (new file, 7 lines)
@ -0,0 +1,7 @@
:l doc-comments.nix
:doc curriedArgs
x = curriedArgs 1
"Note that users may not expect this to behave as it currently does"
:doc x
"This won't produce documentation, because we can't actually document arbitrary values"
:doc x 2
tests/functional/repl/doc-comment-formals.expected (new file, 13 lines)
@ -0,0 +1,13 @@
Nix <nix version>
Type :? for help.
Added <number omitted> variables.

"Note that this is not yet complete"

Function documentedFormals
… defined at
/path/to/tests/functional/repl/doc-comments.nix:57:5

Finds x
tests/functional/repl/doc-comment-formals.in (new file, 3 lines)
@ -0,0 +1,3 @@
:l doc-comments.nix
"Note that this is not yet complete"
:doc documentedFormals
tests/functional/repl/doc-comment-function.expected (new file, 8 lines)
@ -0,0 +1,8 @@
Nix <nix version>
Type :? for help.
Function defined at
/path/to/tests/functional/repl/doc-comment-function.nix:2:1

A doc comment for a file that only contains a function
tests/functional/repl/doc-comment-function.in (new file, 1 line)
@ -0,0 +1 @@
:doc import ./doc-comment-function.nix
tests/functional/repl/doc-comment-function.nix (new file, 3 lines)
@ -0,0 +1,3 @@
/** A doc comment for a file that only contains a function */
{ ... }:
{ }
tests/functional/repl/doc-comments.nix (new file, 60 lines)
@ -0,0 +1,60 @@
{
  /**
    Perform *arithmetic* multiplication. It's kind of like repeated **addition**, very neat.

    ```nix
    multiply 2 3
    => 6
    ```
  */
  multiply = x: y: x * y;

  /**👈 precisely this wide 👉*/
  measurement = x: x;

  floatedIn = /** This also works. */
    x: y: x;

  compact=/**boom*/x: x;

  # https://github.com/NixOS/rfcs/blob/master/rfcs/0145-doc-strings.md#ambiguous-placement
  /** Ignore!!! */
  unambiguous =
    /** Very close */
    x: x;

  /** Firmly rigid. */
  constant = true;

  /** Immovably fixed. */
  lib.version = "9000";

  /** Unchangeably constant. */
  lib.attr.empty = { };

  lib.attr.undocumented = { };

  nonStrict = /** My syntax is not strict, but I'm strict anyway. */ x: x;
  strict = /** I don't have to be strict, but I am anyway. */ { ... }: null;
  # Note that pre and post are the same here. I just had to name them somehow.
  strictPre = /** Here's one way to do this */ a@{ ... }: a;
  strictPost = /** Here's another way to do this */ { ... }@a: a;

  # TODO

  /** You won't see this. */
  curriedArgs =
    /** A documented function. */
    x:
    /** The function returned by applying once */
    y:
    /** A function body performing summation of two items */
    x + y;

  /** Documented formals (but you won't see this comment) */
  documentedFormals =
    /** Finds x */
    { /** The x attribute */
      x
    }: x;
}
tests/functional/repl/doc-compact.expected (new file, 11 lines)
@ -0,0 +1,11 @@
Nix <nix version>
Type :? for help.
Added <number omitted> variables.

Function compact
… defined at
/path/to/tests/functional/repl/doc-comments.nix:18:20

boom
tests/functional/repl/doc-compact.in (new file, 2 lines)
@ -0,0 +1,2 @@
:l doc-comments.nix
:doc compact
tests/functional/repl/doc-constant.expected (new file, 102 lines)
@ -0,0 +1,102 @@
Nix <nix version>
Type :? for help.
Added <number omitted> variables.

error: value does not have documentation

Attribute version

… defined at
/path/to/tests/functional/repl/doc-comments.nix:30:3

Immovably fixed.

Attribute empty

… defined at
/path/to/tests/functional/repl/doc-comments.nix:33:3

Unchangeably constant.

error:
… while evaluating the attribute 'attr.undocument'
at /path/to/tests/functional/repl/doc-comments.nix:33:3:
32| /** Unchangeably constant. */
33| lib.attr.empty = { };
| ^
34|

error: attribute 'undocument' missing
at «string»:1:1:
1| lib.attr.undocument
| ^
Did you mean undocumented?

Attribute constant

… defined at
/path/to/tests/functional/repl/doc-comments.nix:27:3

Firmly rigid.

Attribute version

… defined at
/path/to/tests/functional/repl/doc-comments.nix:30:3

Immovably fixed.

Attribute empty

… defined at
/path/to/tests/functional/repl/doc-comments.nix:33:3

Unchangeably constant.

Attribute undocumented

… defined at
/path/to/tests/functional/repl/doc-comments.nix:35:3

No documentation found.

error: undefined variable 'missing'
at «string»:1:1:
1| missing
| ^

error: undefined variable 'constanz'
at «string»:1:1:
1| constanz
| ^

error: undefined variable 'missing'
at «string»:1:1:
1| missing.attr
| ^

error: attribute 'missing' missing
at «string»:1:1:
1| lib.missing
| ^

error: attribute 'missing' missing
at «string»:1:1:
1| lib.missing.attr
| ^

error:
… while evaluating the attribute 'attr.undocumental'
at /path/to/tests/functional/repl/doc-comments.nix:33:3:
32| /** Unchangeably constant. */
33| lib.attr.empty = { };
| ^
34|

error: attribute 'undocumental' missing
at «string»:1:1:
1| lib.attr.undocumental
| ^
Did you mean undocumented?
tests/functional/repl/doc-constant.in (new file, 15 lines)
@ -0,0 +1,15 @@
:l doc-comments.nix
:doc constant
:doc lib.version
:doc lib.attr.empty
:doc lib.attr.undocument
:doc (import ./doc-comments.nix).constant
:doc (import ./doc-comments.nix).lib.version
:doc (import ./doc-comments.nix).lib.attr.empty
:doc (import ./doc-comments.nix).lib.attr.undocumented
:doc missing
:doc constanz
:doc missing.attr
:doc lib.missing
:doc lib.missing.attr
:doc lib.attr.undocumental
tests/functional/repl/doc-floatedIn.expected (new file, 11 lines)
@ -0,0 +1,11 @@
Nix <nix version>
Type :? for help.
Added <number omitted> variables.

Function floatedIn
… defined at
/path/to/tests/functional/repl/doc-comments.nix:16:5

This also works.
tests/functional/repl/doc-floatedIn.in (new file, 2 lines)
@ -0,0 +1,2 @@
:l doc-comments.nix
:doc floatedIn
tests/functional/repl/doc-lambda-flavors.expected (new file, 29 lines)
@ -0,0 +1,29 @@
Nix <nix version>
Type :? for help.
Added <number omitted> variables.

Function nonStrict
… defined at
/path/to/tests/functional/repl/doc-comments.nix:37:70

My syntax is not strict, but I'm strict anyway.

Function strict
… defined at
/path/to/tests/functional/repl/doc-comments.nix:38:63

I don't have to be strict, but I am anyway.

Function strictPre
… defined at
/path/to/tests/functional/repl/doc-comments.nix:40:48

Here's one way to do this

Function strictPost
… defined at
/path/to/tests/functional/repl/doc-comments.nix:41:53

Here's another way to do this
tests/functional/repl/doc-lambda-flavors.in (new file, 5 lines)
@ -0,0 +1,5 @@
:l doc-comments.nix
:doc nonStrict
:doc strict
:doc strictPre
:doc strictPost
tests/functional/repl/doc-measurement.expected (new file, 11 lines)
@ -0,0 +1,11 @@
Nix <nix version>
Type :? for help.
Added <number omitted> variables.

Function measurement
… defined at
/path/to/tests/functional/repl/doc-comments.nix:13:17

👈 precisely this wide 👉
tests/functional/repl/doc-measurement.in (new file, 2 lines)
@ -0,0 +1,2 @@
:l doc-comments.nix
:doc measurement
tests/functional/repl/doc-multiply.expected (new file, 15 lines)
@ -0,0 +1,15 @@
Nix <nix version>
Type :? for help.
Added <number omitted> variables.

Function multiply
… defined at
/path/to/tests/functional/repl/doc-comments.nix:10:14

Perform arithmetic multiplication. It's kind of like
repeated addition, very neat.

| multiply 2 3
| => 6
tests/functional/repl/doc-multiply.in (new file, 2 lines)
@ -0,0 +1,2 @@
:l doc-comments.nix
:doc multiply
tests/functional/repl/doc-unambiguous.expected (new file, 11 lines)
@ -0,0 +1,11 @@
Nix <nix version>
Type :? for help.
Added <number omitted> variables.

Function unambiguous
… defined at
/path/to/tests/functional/repl/doc-comments.nix:24:5

Very close
tests/functional/repl/doc-unambiguous.in (new file, 2 lines)
@ -0,0 +1,2 @@
:l doc-comments.nix
:doc unambiguous
tests/unit/libutil/position.cc (new file, 122 lines)
@ -0,0 +1,122 @@
#include <gtest/gtest.h>

#include "position.hh"

namespace nix {

inline Pos::Origin makeStdin(std::string s)
{
return Pos::Stdin{make_ref<std::string>(s)};
}

TEST(Position, getSnippetUpTo_0)
{
Pos::Origin o = makeStdin("");
Pos p(1, 1, o);
ASSERT_EQ(p.getSnippetUpTo(p), "");
}
TEST(Position, getSnippetUpTo_1)
{
Pos::Origin o = makeStdin("x");
{
// NOTE: line and column are actually 1-based indexes
Pos start(0, 0, o);
Pos end(99, 99, o);
ASSERT_EQ(start.getSnippetUpTo(start), "");
ASSERT_EQ(start.getSnippetUpTo(end), "x");
ASSERT_EQ(end.getSnippetUpTo(end), "");
ASSERT_EQ(end.getSnippetUpTo(start), std::nullopt);
}
{
// NOTE: line and column are actually 1-based indexes
Pos start(0, 99, o);
Pos end(99, 0, o);
ASSERT_EQ(start.getSnippetUpTo(start), "");

// "x" might be preferable, but we only care about not crashing for invalid inputs
ASSERT_EQ(start.getSnippetUpTo(end), "");

ASSERT_EQ(end.getSnippetUpTo(end), "");
ASSERT_EQ(end.getSnippetUpTo(start), std::nullopt);
}
{
Pos start(1, 1, o);
Pos end(1, 99, o);
ASSERT_EQ(start.getSnippetUpTo(start), "");
ASSERT_EQ(start.getSnippetUpTo(end), "x");
ASSERT_EQ(end.getSnippetUpTo(end), "");
ASSERT_EQ(end.getSnippetUpTo(start), "");
}
{
Pos start(1, 1, o);
Pos end(99, 99, o);
ASSERT_EQ(start.getSnippetUpTo(start), "");
ASSERT_EQ(start.getSnippetUpTo(end), "x");
ASSERT_EQ(end.getSnippetUpTo(end), "");
ASSERT_EQ(end.getSnippetUpTo(start), std::nullopt);
}
}
TEST(Position, getSnippetUpTo_2)
{
Pos::Origin o = makeStdin("asdf\njkl\nqwer");
{
Pos start(1, 1, o);
Pos end(1, 2, o);
ASSERT_EQ(start.getSnippetUpTo(start), "");
ASSERT_EQ(start.getSnippetUpTo(end), "a");
ASSERT_EQ(end.getSnippetUpTo(end), "");

// nullopt? I feel like changing the column handling would just make it more fragile
ASSERT_EQ(end.getSnippetUpTo(start), "");
}
{
Pos start(1, 2, o);
Pos end(1, 3, o);
ASSERT_EQ(start.getSnippetUpTo(end), "s");
}
{
Pos start(1, 2, o);
Pos end(2, 2, o);
ASSERT_EQ(start.getSnippetUpTo(end), "sdf\nj");
}
{
Pos start(1, 2, o);
Pos end(3, 2, o);
ASSERT_EQ(start.getSnippetUpTo(end), "sdf\njkl\nq");
}
{
Pos start(1, 2, o);
Pos end(2, 99, o);
ASSERT_EQ(start.getSnippetUpTo(end), "sdf\njkl");
}
{
Pos start(1, 4, o);
Pos end(2, 99, o);
ASSERT_EQ(start.getSnippetUpTo(end), "f\njkl");
}
{
Pos start(1, 5, o);
Pos end(2, 99, o);
ASSERT_EQ(start.getSnippetUpTo(end), "\njkl");
}
{
Pos start(1, 6, o); // invalid: starting column past last "line character", ie at the newline
Pos end(2, 99, o);
ASSERT_EQ(start.getSnippetUpTo(end), "\njkl"); // jkl might be acceptable for this invalid start position
}
{
Pos start(1, 1, o);
Pos end(2, 0, o); // invalid
ASSERT_EQ(start.getSnippetUpTo(end), "asdf\n");
}
}

TEST(Position, example_1)
{
Pos::Origin o = makeStdin("  unambiguous = \n    /** Very close */\n    x: x;\n# ok\n");
Pos start(2, 5, o);
Pos end(2, 22, o);
ASSERT_EQ(start.getSnippetUpTo(end), "/** Very close */");
}

} // namespace nix