#include "git-utils.hh"
#include "fs-input-accessor.hh"
#include "cache.hh"
#include "finally.hh"
#include "processes.hh"
#include "signals.hh"
#include "users.hh"
#include "fs-sink.hh"

#include <git2/attr.h>
#include <git2/blob.h>
#include <git2/commit.h>
#include <git2/config.h>
#include <git2/describe.h>
#include <git2/errors.h>
#include <git2/global.h>
#include <git2/object.h>
#include <git2/refs.h>
#include <git2/remote.h>
#include <git2/repository.h>
#include <git2/revparse.h>
#include <git2/status.h>
#include <git2/submodule.h>
#include <git2/tree.h>

#include <iostream>
#include <unordered_set>
#include <queue>
#include <regex>
#include <span>

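// Make git_oid usable as a key in std::unordered_set / std::unordered_map.
// An OID is itself the output of a cryptographic hash, so reusing its
// leading bytes as the hash value is sufficient.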
namespace std {

template<> struct hash<git_oid>
{
    size_t operator()(const git_oid & oid) const
    {
        return * (size_t *) oid.id;
    }
};

}

std::ostream & operator << (std::ostream & str, const git_oid & oid)
{
    str << git_oid_tostr_s(&oid);
    return str;
}

bool operator == (const git_oid & oid1, const git_oid & oid2)
{
    return git_oid_equal(&oid1, &oid2);
}

namespace nix {

struct GitInputAccessor;

// Some wrapper types that ensure that the git_*_free functions get called.
template<auto del>
struct Deleter
{
    template <typename T>
    void operator()(T * p) const { del(p); };
};

typedef std::unique_ptr<git_repository, Deleter<git_repository_free>> Repository;
typedef std::unique_ptr<git_tree_entry, Deleter<git_tree_entry_free>> TreeEntry;
typedef std::unique_ptr<git_tree, Deleter<git_tree_free>> Tree;
typedef std::unique_ptr<git_treebuilder, Deleter<git_treebuilder_free>> TreeBuilder;
typedef std::unique_ptr<git_blob, Deleter<git_blob_free>> Blob;
typedef std::unique_ptr<git_object, Deleter<git_object_free>> Object;
typedef std::unique_ptr<git_commit, Deleter<git_commit_free>> Commit;
typedef std::unique_ptr<git_reference, Deleter<git_reference_free>> Reference;
typedef std::unique_ptr<git_describe_result, Deleter<git_describe_result_free>> DescribeResult;
typedef std::unique_ptr<git_status_list, Deleter<git_status_list_free>> StatusList;
typedef std::unique_ptr<git_remote, Deleter<git_remote_free>> Remote;
typedef std::unique_ptr<git_config, Deleter<git_config_free>> GitConfig;
typedef std::unique_ptr<git_config_iterator, Deleter<git_config_iterator_free>> ConfigIterator;

// A helper to ensure that we don't leak objects returned by libgit2.
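// Usage sketch: pass Setter(x) wherever libgit2 expects an out-parameter,
// e.g.
//
//     Repository repo;
//     if (git_repository_open(Setter(repo), path.c_str()))
//         throw Error("opening repository: %s", git_error_last()->message);
//
// The temporary exposes a raw pointer-to-pointer to the libgit2 call and, on
// destruction, transfers ownership of the result into the smart pointer, so
// the matching git_*_free is guaranteed to run.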
template<typename T>
struct Setter
{
    T & t;
    typename T::pointer p = nullptr;

    Setter(T & t) : t(t) { }

    ~Setter() { if (p) t = T(p); }

    operator typename T::pointer * () { return &p; }
};

Hash toHash(const git_oid & oid)
{
    #ifdef GIT_EXPERIMENTAL_SHA256
    assert(oid.type == GIT_OID_SHA1);
    #endif
    Hash hash(HashAlgorithm::SHA1);
    memcpy(hash.hash, oid.id, hash.hashSize);
    return hash;
}

static void initLibGit2()
{
    if (git_libgit2_init() < 0)
        throw Error("initialising libgit2: %s", git_error_last()->message);
}

git_oid hashToOID(const Hash & hash)
{
    git_oid oid;
    if (git_oid_fromstr(&oid, hash.gitRev().c_str()))
        throw Error("cannot convert '%s' to a Git OID", hash.gitRev());
    return oid;
}

Object lookupObject(git_repository * repo, const git_oid & oid)
{
    Object obj;
    if (git_object_lookup(Setter(obj), repo, &oid, GIT_OBJECT_ANY)) {
        auto err = git_error_last();
        throw Error("getting Git object '%s': %s", oid, err->message);
    }
    return obj;
}

template<typename T>
T peelObject(git_repository * repo, git_object * obj, git_object_t type)
{
    T obj2;
    if (git_object_peel((git_object * *) (typename T::pointer *) Setter(obj2), obj, type)) {
        auto err = git_error_last();
        throw Error("peeling Git object '%s': %s", git_object_id(obj), err->message);
    }
    return obj2;
}

struct GitRepoImpl : GitRepo, std::enable_shared_from_this<GitRepoImpl>
{
    /** Location of the repository on disk. */
    std::filesystem::path path;
    Repository repo;

    GitRepoImpl(std::filesystem::path _path, bool create, bool bare)
        : path(std::move(_path))
    {
        initLibGit2();

        if (pathExists(path.string())) {
            if (git_repository_open(Setter(repo), path.string().c_str()))
                throw Error("opening Git repository '%s': %s", path, git_error_last()->message);
        } else {
            if (git_repository_init(Setter(repo), path.string().c_str(), bare))
                throw Error("creating Git repository '%s': %s", path, git_error_last()->message);
        }
    }

    operator git_repository * ()
    {
        return repo.get();
    }

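    /* Count the distinct commits reachable from 'rev' by a breadth-first
       walk over the parent links (roughly what 'git rev-list --count <rev>'
       reports). */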
    uint64_t getRevCount(const Hash & rev) override
    {
        std::unordered_set<git_oid> done;
        std::queue<Commit> todo;

        todo.push(peelObject<Commit>(*this, lookupObject(*this, hashToOID(rev)).get(), GIT_OBJECT_COMMIT));

        while (auto commit = pop(todo)) {
            if (!done.insert(*git_commit_id(commit->get())).second) continue;

            for (size_t n = 0; n < git_commit_parentcount(commit->get()); ++n) {
                git_commit * parent;
                if (git_commit_parent(&parent, commit->get(), n))
                    throw Error("getting parent of Git commit '%s': %s", *git_commit_id(commit->get()), git_error_last()->message);
                todo.push(Commit(parent));
            }
        }

        return done.size();
    }

    uint64_t getLastModified(const Hash & rev) override
    {
        auto commit = peelObject<Commit>(*this, lookupObject(*this, hashToOID(rev)).get(), GIT_OBJECT_COMMIT);

        return git_commit_time(commit.get());
    }

    bool isShallow() override
    {
        return git_repository_is_shallow(*this);
    }

    void setRemote(const std::string & name, const std::string & url) override
    {
        if (git_remote_set_url(*this, name.c_str(), url.c_str()))
            throw Error("setting remote '%s' URL to '%s': %s", name, url, git_error_last()->message);
    }

    Hash resolveRef(std::string ref) override
    {
        Object object;
        if (git_revparse_single(Setter(object), *this, ref.c_str()))
            throw Error("resolving Git reference '%s': %s", ref, git_error_last()->message);
        auto oid = git_object_id(object.get());
        return toHash(*oid);
    }

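    /* Parse a .gitmodules file. The config iterator below reports keys of
       the form "submodule.<name>.path" / ".url" / ".branch"; the '+ 10'
       strips the 10-character "submodule." prefix, so entries are keyed on
       "<name>.path" etc. For illustration, a typical section looks like
       (hypothetical example):

           [submodule "foo"]
               path = foo
               url = https://example.org/foo.git
    */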
    std::vector<Submodule> parseSubmodules(const std::filesystem::path & configFile)
    {
        GitConfig config;
        if (git_config_open_ondisk(Setter(config), configFile.string().c_str()))
            throw Error("parsing .gitmodules file: %s", git_error_last()->message);

        ConfigIterator it;
        if (git_config_iterator_glob_new(Setter(it), config.get(), "^submodule\\..*\\.(path|url|branch)$"))
            throw Error("iterating over .gitmodules: %s", git_error_last()->message);

        std::map<std::string, std::string> entries;

        while (true) {
            git_config_entry * entry = nullptr;
            if (auto err = git_config_next(&entry, it.get())) {
                if (err == GIT_ITEROVER) break;
                throw Error("iterating over .gitmodules: %s", git_error_last()->message);
            }
            entries.emplace(entry->name + 10, entry->value);
        }

        std::vector<Submodule> result;

        for (auto & [key, value] : entries) {
            if (!hasSuffix(key, ".path")) continue;
            std::string key2(key, 0, key.size() - 5);
            auto path = CanonPath(value);
            result.push_back(Submodule {
                .path = path,
                .url = entries[key2 + ".url"],
                .branch = entries[key2 + ".branch"],
            });
        }

        return result;
    }

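    /* libgit2 status callbacks are plain C function pointers with a
       'void *' payload. The trampoline below forwards that payload to a
       std::function, which lets getWorkdirInfo() use a capturing lambda as
       its status callback. */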
    // Helper for statusCallback below.
    static int statusCallbackTrampoline(const char * path, unsigned int statusFlags, void * payload)
    {
        return (*((std::function<int(const char * path, unsigned int statusFlags)> *) payload))(path, statusFlags);
    }

    WorkdirInfo getWorkdirInfo() override
    {
        WorkdirInfo info;

        /* Get the head revision, if any. */
        git_oid headRev;
        if (auto err = git_reference_name_to_id(&headRev, *this, "HEAD")) {
            if (err != GIT_ENOTFOUND)
                throw Error("resolving HEAD: %s", git_error_last()->message);
        } else
            info.headRev = toHash(headRev);

        /* Get all tracked files and determine whether the working
           directory is dirty. */
        std::function<int(const char * path, unsigned int statusFlags)> statusCallback = [&](const char * path, unsigned int statusFlags)
        {
            if (!(statusFlags & GIT_STATUS_INDEX_DELETED) &&
                !(statusFlags & GIT_STATUS_WT_DELETED))
                info.files.insert(CanonPath(path));
            if (statusFlags != GIT_STATUS_CURRENT)
                info.isDirty = true;
            return 0;
        };

        git_status_options options = GIT_STATUS_OPTIONS_INIT;
        options.flags |= GIT_STATUS_OPT_INCLUDE_UNMODIFIED;
        options.flags |= GIT_STATUS_OPT_EXCLUDE_SUBMODULES;
        if (git_status_foreach_ext(*this, &options, &statusCallbackTrampoline, &statusCallback))
            throw Error("getting working directory status: %s", git_error_last()->message);

        /* Get submodule info. */
        auto modulesFile = path / ".gitmodules";
        if (pathExists(modulesFile.string()))
            info.submodules = parseSubmodules(modulesFile);

        return info;
    }

    std::optional<std::string> getWorkdirRef() override
    {
        Reference ref;
        if (git_reference_lookup(Setter(ref), *this, "HEAD"))
            throw Error("looking up HEAD: %s", git_error_last()->message);

        if (auto target = git_reference_symbolic_target(ref.get()))
            return target;

        return std::nullopt;
    }

    std::vector<std::tuple<Submodule, Hash>> getSubmodules(const Hash & rev, bool exportIgnore) override;

    std::string resolveSubmoduleUrl(const std::string & url) override
    {
        git_buf buf = GIT_BUF_INIT;
        if (git_submodule_resolve_url(&buf, *this, url.c_str()))
            throw Error("resolving Git submodule URL '%s'", url);
        Finally cleanup = [&]() { git_buf_dispose(&buf); };

        std::string res(buf.ptr);
        return res;
    }

    bool hasObject(const Hash & oid_) override
    {
        auto oid = hashToOID(oid_);

        Object obj;
        if (auto errCode = git_object_lookup(Setter(obj), *this, &oid, GIT_OBJECT_ANY)) {
            if (errCode == GIT_ENOTFOUND) return false;
            auto err = git_error_last();
            throw Error("getting Git object '%s': %s", oid, err->message);
        }

        return true;
    }

    /**
     * A 'GitInputAccessor' with no regard for export-ignore or any other transformations.
     */
    ref<GitInputAccessor> getRawAccessor(const Hash & rev);

    ref<SourceAccessor> getAccessor(const Hash & rev, bool exportIgnore) override;

    ref<SourceAccessor> getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError e) override;

    ref<GitFileSystemObjectSink> getFileSystemObjectSink() override;

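    /* libgit2 fetch progress callbacks: forward sideband messages and
       transfer statistics to the logger, and abort the transfer (by
       returning -1) if the user has interrupted Nix. */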
    static int sidebandProgressCallback(const char * str, int len, void * payload)
    {
        auto act = (Activity *) payload;
        act->result(resFetchStatus, trim(std::string_view(str, len)));
        return getInterrupted() ? -1 : 0;
    }

    static int transferProgressCallback(const git_indexer_progress * stats, void * payload)
    {
        auto act = (Activity *) payload;
        act->result(resFetchStatus,
            fmt("%d/%d objects received, %d/%d deltas indexed, %.1f MiB",
                stats->received_objects,
                stats->total_objects,
                stats->indexed_deltas,
                stats->total_deltas,
                stats->received_bytes / (1024.0 * 1024.0)));
        return getInterrupted() ? -1 : 0;
    }

    void fetch(
        const std::string & url,
        const std::string & refspec,
        bool shallow) override
    {
        Activity act(*logger, lvlTalkative, actFetchTree, fmt("fetching Git repository '%s'", url));

        // TODO: implement git-credential helper support (preferably via libgit2, which as of 2024-01 does not support that)
        // then use code that was removed in this commit (see blame)

        auto dir = this->path;
        Strings gitArgs;
        if (shallow) {
            gitArgs = { "-C", dir.string(), "fetch", "--quiet", "--force", "--depth", "1", "--", url, refspec };
        }
        else {
            gitArgs = { "-C", dir.string(), "fetch", "--quiet", "--force", "--", url, refspec };
        }

        runProgram(RunOptions {
            .program = "git",
            .lookupPath = true,
            // FIXME: git stderr messes up our progress indicator, so
            // we're using --quiet for now. Should process its stderr.
            .args = gitArgs,
            .input = {},
            .isInteractive = true
        });
    }

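    /* Verify that the given commit was signed by one of the given keys, by
       writing them to a temporary gpg.ssh.allowedSignersFile (one
       "* <type> <key>" line per key) and running 'git verify-commit' with
       that file. */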
    void verifyCommit(
        const Hash & rev,
        const std::vector<fetchers::PublicKey> & publicKeys) override
    {
        // Create an ad-hoc allowedSignersFile and populate it with publicKeys.
        auto allowedSignersFile = createTempFile().second;
        std::string allowedSigners;
        for (const fetchers::PublicKey & k : publicKeys) {
            if (k.type != "ssh-dsa"
                && k.type != "ssh-ecdsa"
                && k.type != "ssh-ecdsa-sk"
                && k.type != "ssh-ed25519"
                && k.type != "ssh-ed25519-sk"
                && k.type != "ssh-rsa")
                throw Error("Unknown key type '%s'.\n"
                    "Please use one of\n"
                    "- ssh-dsa\n"
                    "- ssh-ecdsa\n"
                    "- ssh-ecdsa-sk\n"
                    "- ssh-ed25519\n"
                    "- ssh-ed25519-sk\n"
                    "- ssh-rsa", k.type);
            allowedSigners += "* " + k.type + " " + k.key + "\n";
        }
        writeFile(allowedSignersFile, allowedSigners);

        // Run the verification command.
        auto [status, output] = runProgram(RunOptions {
            .program = "git",
            .args = {
                "-c",
                "gpg.ssh.allowedSignersFile=" + allowedSignersFile,
                "-C", path.string(),
                "verify-commit",
                rev.gitRev()
            },
            .mergeStderrToStdout = true,
        });

        /* Evaluate the result via the exit status and by checking whether
           the public key fingerprints appear on stderr. This is necessary
           because the git command might also succeed due to the commit
           being signed by GPG keys that are present in the user's key
           agent. */
        std::string re = R"(Good "git" signature for \* with .* key SHA256:[)";
        for (const fetchers::PublicKey & k : publicKeys) {
            // Calculate the SHA-256 fingerprint of the public key and escape
            // the regex symbol '+' to match the key literally.
            auto fingerprint = trim(hashString(HashAlgorithm::SHA256, base64Decode(k.key)).to_string(nix::HashFormat::Base64, false), "=");
            auto escaped_fingerprint = std::regex_replace(fingerprint, std::regex("\\+"), "\\+");
            re += "(" + escaped_fingerprint + ")";
        }
        re += "]";
        if (status == 0 && std::regex_search(output, std::regex(re)))
            printTalkative("Signature verification on commit %s succeeded.", rev.gitRev());
        else
            throw Error("Commit signature verification on commit %s failed: %s", rev.gitRev(), output);
    }

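    /* Compute the NAR hash of the content addressed by the given Git tree
       hash, caching the result in the fetcher cache keyed on the tree hash
       so the (potentially expensive) hashing is only done once per tree. */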
    Hash treeHashToNarHash(const Hash & treeHash) override
    {
        auto accessor = getAccessor(treeHash, false);

        fetchers::Attrs cacheKey({{"_what", "treeHashToNarHash"}, {"treeHash", treeHash.gitRev()}});

        if (auto res = fetchers::getCache()->lookup(cacheKey))
            return Hash::parseAny(fetchers::getStrAttr(*res, "narHash"), HashAlgorithm::SHA256);

        auto narHash = accessor->hashPath(CanonPath::root);

        fetchers::getCache()->upsert(cacheKey, fetchers::Attrs({{"narHash", narHash.to_string(HashFormat::SRI, true)}}));

        return narHash;
    }
};

ref<GitRepo> GitRepo::openRepo(const std::filesystem::path & path, bool create, bool bare)
{
    return make_ref<GitRepoImpl>(path, create, bare);
}

/**
 * Raw git tree input accessor.
 */
struct GitInputAccessor : SourceAccessor
{
    ref<GitRepoImpl> repo;
    Tree root;

    GitInputAccessor(ref<GitRepoImpl> repo_, const Hash & rev)
        : repo(repo_)
        , root(peelObject<Tree>(*repo, lookupObject(*repo, hashToOID(rev)).get(), GIT_OBJECT_TREE))
    {
    }

    std::string readBlob(const CanonPath & path, bool symlink)
    {
        auto blob = getBlob(path, symlink);

        auto data = std::string_view((const char *) git_blob_rawcontent(blob.get()), git_blob_rawsize(blob.get()));

        return std::string(data);
    }

    std::string readFile(const CanonPath & path) override
    {
        return readBlob(path, false);
    }

    bool pathExists(const CanonPath & path) override
    {
        return path.isRoot() ? true : (bool) lookup(path);
    }

    std::optional<Stat> maybeLstat(const CanonPath & path) override
    {
        if (path.isRoot())
            return Stat { .type = tDirectory };

        auto entry = lookup(path);
        if (!entry)
            return std::nullopt;

        auto mode = git_tree_entry_filemode(entry);

        if (mode == GIT_FILEMODE_TREE)
            return Stat { .type = tDirectory };

        else if (mode == GIT_FILEMODE_BLOB)
            return Stat { .type = tRegular };

        else if (mode == GIT_FILEMODE_BLOB_EXECUTABLE)
            return Stat { .type = tRegular, .isExecutable = true };

        else if (mode == GIT_FILEMODE_LINK)
            return Stat { .type = tSymlink };

        else if (mode == GIT_FILEMODE_COMMIT)
            // Treat submodules as an empty directory.
            return Stat { .type = tDirectory };

        else
            throw Error("file '%s' has an unsupported Git file type", showPath(path));
    }

    DirEntries readDirectory(const CanonPath & path) override
    {
        return std::visit(overloaded {
            [&](Tree tree) {
                DirEntries res;

                auto count = git_tree_entrycount(tree.get());

                for (size_t n = 0; n < count; ++n) {
                    auto entry = git_tree_entry_byindex(tree.get(), n);
                    // FIXME: add to cache
                    res.emplace(std::string(git_tree_entry_name(entry)), DirEntry{});
                }

                return res;
            },
            [&](Submodule) {
                return DirEntries();
            }
        }, getTree(path));
    }

    std::string readLink(const CanonPath & path) override
    {
        return readBlob(path, true);
    }

    Hash getSubmoduleRev(const CanonPath & path)
    {
        auto entry = need(path);

        if (git_tree_entry_type(entry) != GIT_OBJECT_COMMIT)
            throw Error("'%s' is not a submodule", showPath(path));

        return toHash(*git_tree_entry_id(entry));
    }

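    /* Cache of tree entries keyed by path, filled by lookup() below. The
       entries are duplicated (git_tree_entry_dup), so they remain valid
       independently of the parent tree they were found in. */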
    std::unordered_map<CanonPath, TreeEntry> lookupCache;

    /* Recursively look up 'path' relative to the root. */
    git_tree_entry * lookup(const CanonPath & path)
    {
        auto i = lookupCache.find(path);
        if (i != lookupCache.end()) return i->second.get();

        auto parent = path.parent();
        if (!parent) return nullptr;

        auto name = path.baseName().value();

        auto parentTree = lookupTree(*parent);
        if (!parentTree) return nullptr;

        auto count = git_tree_entrycount(parentTree->get());

        git_tree_entry * res = nullptr;

        /* Add all the tree entries to the cache to speed up
           subsequent lookups. */
        for (size_t n = 0; n < count; ++n) {
            auto entry = git_tree_entry_byindex(parentTree->get(), n);

            TreeEntry copy;
            if (git_tree_entry_dup(Setter(copy), entry))
                throw Error("duplicating tree entry: %s", git_error_last()->message);

            auto entryName = std::string_view(git_tree_entry_name(entry));

            if (entryName == name)
                res = copy.get();

            auto path2 = *parent;
            path2.push(entryName);
            lookupCache.emplace(path2, std::move(copy)).first->second.get();
        }

        return res;
    }

    std::optional<Tree> lookupTree(const CanonPath & path)
    {
        if (path.isRoot()) {
            Tree tree;
            if (git_tree_dup(Setter(tree), root.get()))
                throw Error("duplicating directory '%s': %s", showPath(path), git_error_last()->message);
            return tree;
        }

        auto entry = lookup(path);
        if (!entry || git_tree_entry_type(entry) != GIT_OBJECT_TREE)
            return std::nullopt;

        Tree tree;
        if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(tree), *repo, entry))
            throw Error("looking up directory '%s': %s", showPath(path), git_error_last()->message);

        return tree;
    }

    git_tree_entry * need(const CanonPath & path)
    {
        auto entry = lookup(path);
        if (!entry)
            throw Error("'%s' does not exist", showPath(path));
        return entry;
    }

    struct Submodule { };

    std::variant<Tree, Submodule> getTree(const CanonPath & path)
    {
        if (path.isRoot()) {
            Tree tree;
            if (git_tree_dup(Setter(tree), root.get()))
                throw Error("duplicating directory '%s': %s", showPath(path), git_error_last()->message);
            return tree;
        }

        auto entry = need(path);

        if (git_tree_entry_type(entry) == GIT_OBJECT_COMMIT)
            return Submodule();

        if (git_tree_entry_type(entry) != GIT_OBJECT_TREE)
            throw Error("'%s' is not a directory", showPath(path));

        Tree tree;
        if (git_tree_entry_to_object((git_object * *) (git_tree * *) Setter(tree), *repo, entry))
            throw Error("looking up directory '%s': %s", showPath(path), git_error_last()->message);

        return tree;
    }

    Blob getBlob(const CanonPath & path, bool expectSymlink)
    {
        auto notExpected = [&]()
        {
            throw Error(
                expectSymlink
                ? "'%s' is not a symlink"
                : "'%s' is not a regular file",
                showPath(path));
        };

        if (path.isRoot()) notExpected();

        auto entry = need(path);

        if (git_tree_entry_type(entry) != GIT_OBJECT_BLOB)
            notExpected();

        auto mode = git_tree_entry_filemode(entry);
        if (expectSymlink) {
            if (mode != GIT_FILEMODE_LINK)
                notExpected();
        } else {
            if (mode != GIT_FILEMODE_BLOB && mode != GIT_FILEMODE_BLOB_EXECUTABLE)
                notExpected();
        }

        Blob blob;
        if (git_tree_entry_to_object((git_object * *) (git_blob * *) Setter(blob), *repo, entry))
            throw Error("looking up file '%s': %s", showPath(path), git_error_last()->message);

        return blob;
    }
};

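/* An accessor that hides paths carrying the 'export-ignore' gitattribute,
   mirroring what 'git archive' does when the exportIgnore setting is
   enabled. */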
struct GitExportIgnoreInputAccessor : CachingFilteringInputAccessor {
    ref<GitRepoImpl> repo;
    std::optional<Hash> rev;

    GitExportIgnoreInputAccessor(ref<GitRepoImpl> repo, ref<SourceAccessor> next, std::optional<Hash> rev)
        : CachingFilteringInputAccessor(next, [&](const CanonPath & path) {
            return RestrictedPathError(fmt("'%s' does not exist because it was fetched with exportIgnore enabled", path));
        })
        , repo(repo)
        , rev(rev)
    { }

    bool gitAttrGet(const CanonPath & path, const char * attrName, const char * & valueOut)
    {
        const char * pathCStr = path.rel_c_str();

        if (rev) {
            git_attr_options opts = GIT_ATTR_OPTIONS_INIT;
            opts.attr_commit_id = hashToOID(*rev);
            // TODO: test that gitattributes from global and system are not used
            // (i.e. neither from the user's home directory nor from /etc).
            opts.flags = GIT_ATTR_CHECK_INCLUDE_COMMIT | GIT_ATTR_CHECK_NO_SYSTEM;
            return git_attr_get_ext(
                &valueOut,
                *repo,
                &opts,
                pathCStr,
                attrName);
        }
        else {
            return git_attr_get(
                &valueOut,
                *repo,
                GIT_ATTR_CHECK_INDEX_ONLY | GIT_ATTR_CHECK_NO_SYSTEM,
                pathCStr,
                attrName);
        }
    }

    bool isExportIgnored(const CanonPath & path)
    {
        const char * exportIgnoreEntry = nullptr;

        // GIT_ATTR_CHECK_INDEX_ONLY:
        // > It will use index only for creating archives or for a bare repo
        // > (if an index has been specified for the bare repo).
        // -- https://github.com/libgit2/libgit2/blob/HEAD/include/git2/attr.h#L113C62-L115C48
        if (gitAttrGet(path, "export-ignore", exportIgnoreEntry)) {
            if (git_error_last()->klass == GIT_ENOTFOUND)
                return false;
            else
                throw Error("looking up '%s': %s", showPath(path), git_error_last()->message);
        }
        else {
            // Official git will silently reject export-ignore lines that have
            // values. We do the same.
            return GIT_ATTR_IS_TRUE(exportIgnoreEntry);
        }
    }

    bool isAllowedUncached(const CanonPath & path) override
    {
        return !isExportIgnored(path);
    }

};

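/* A sink that writes an incoming file system hierarchy (e.g. the contents of
   a tarball) directly into the Git object database. It maintains a stack of
   pending tree builders, one per directory currently being written; when the
   traversal leaves a directory, its builder is flushed to a tree object and
   added to its parent builder (see updateBuilders()). */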
struct GitFileSystemObjectSinkImpl : GitFileSystemObjectSink
{
    ref<GitRepoImpl> repo;

    struct PendingDir
    {
        std::string name;
        TreeBuilder builder;
    };

    std::vector<PendingDir> pendingDirs;

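    /* Number of leading path components to strip from incoming paths. The
       default of 1 drops the top-level directory that tarball members are
       typically nested under. */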
    size_t componentsToStrip = 1;

    void pushBuilder(std::string name)
    {
        git_treebuilder * b;
        if (git_treebuilder_new(&b, *repo, nullptr))
            throw Error("creating a tree builder: %s", git_error_last()->message);
        pendingDirs.push_back({ .name = std::move(name), .builder = TreeBuilder(b) });
    };

    GitFileSystemObjectSinkImpl(ref<GitRepoImpl> repo) : repo(repo)
    {
        pushBuilder("");
    }

    std::pair<git_oid, std::string> popBuilder()
    {
        assert(!pendingDirs.empty());
        auto pending = std::move(pendingDirs.back());
        git_oid oid;
        if (git_treebuilder_write(&oid, pending.builder.get()))
            throw Error("creating a tree object: %s", git_error_last()->message);
        pendingDirs.pop_back();
        return {oid, pending.name};
    };

    void addToTree(const std::string & name, const git_oid & oid, git_filemode_t mode)
    {
        assert(!pendingDirs.empty());
        auto & pending = pendingDirs.back();
        if (git_treebuilder_insert(nullptr, pending.builder.get(), name.c_str(), &oid, mode))
            throw Error("adding a file to a tree builder: %s", git_error_last()->message);
    };

    void updateBuilders(std::span<const std::string> names)
    {
        // Find the common prefix of pendingDirs and names.
        size_t prefixLen = 0;
        for (; prefixLen < names.size() && prefixLen + 1 < pendingDirs.size(); ++prefixLen)
            if (names[prefixLen] != pendingDirs[prefixLen + 1].name)
                break;

        // Finish the builders that are not part of the common prefix.
        for (auto n = pendingDirs.size(); n > prefixLen + 1; --n) {
            auto [oid, name] = popBuilder();
            addToTree(name, oid, GIT_FILEMODE_TREE);
        }

        // Create builders for the new directories.
        for (auto n = prefixLen; n < names.size(); ++n)
            pushBuilder(names[n]);
    };

    bool prepareDirs(const std::vector<std::string> & pathComponents, bool isDir)
    {
        std::span<const std::string> pathComponents2{pathComponents};

        if (pathComponents2.size() <= componentsToStrip) return false;
        pathComponents2 = pathComponents2.subspan(componentsToStrip);

        updateBuilders(
            isDir
            ? pathComponents2
            : pathComponents2.first(pathComponents2.size() - 1));

        return true;
    }

    void createRegularFile(
        const Path & path,
        std::function<void(CreateRegularFileSink &)> func) override
    {
        auto pathComponents = tokenizeString<std::vector<std::string>>(path, "/");
        if (!prepareDirs(pathComponents, false)) return;

        git_writestream * stream = nullptr;
        if (git_blob_create_from_stream(&stream, *repo, nullptr))
            throw Error("creating a blob stream object: %s", git_error_last()->message);

        struct CRF : CreateRegularFileSink {
            const Path & path;
            GitFileSystemObjectSinkImpl & back;
            git_writestream * stream;
            bool executable = false;
            CRF(const Path & path, GitFileSystemObjectSinkImpl & back, git_writestream * stream)
                : path(path), back(back), stream(stream)
            {}
            void operator () (std::string_view data) override
            {
                if (stream->write(stream, data.data(), data.size()))
                    throw Error("writing a blob for tarball member '%s': %s", path, git_error_last()->message);
            }
            void isExecutable() override
            {
                executable = true;
            }
        } crf { path, *this, stream };
        func(crf);

        git_oid oid;
        if (git_blob_create_from_stream_commit(&oid, stream))
            throw Error("creating a blob object for tarball member '%s': %s", path, git_error_last()->message);

        addToTree(*pathComponents.rbegin(), oid,
            crf.executable
            ? GIT_FILEMODE_BLOB_EXECUTABLE
            : GIT_FILEMODE_BLOB);
    }

    void createDirectory(const Path & path) override
    {
        auto pathComponents = tokenizeString<std::vector<std::string>>(path, "/");
        (void) prepareDirs(pathComponents, true);
    }

    void createSymlink(const Path & path, const std::string & target) override
    {
        auto pathComponents = tokenizeString<std::vector<std::string>>(path, "/");
        if (!prepareDirs(pathComponents, false)) return;

        git_oid oid;
        if (git_blob_create_from_buffer(&oid, *repo, target.c_str(), target.size()))
            throw Error("creating a blob object for tarball symlink member '%s': %s", path, git_error_last()->message);

        addToTree(*pathComponents.rbegin(), oid, GIT_FILEMODE_LINK);
    }

    Hash sync() override
    {
        updateBuilders({});

        auto [oid, _name] = popBuilder();

        return toHash(oid);
    }
};

ref<GitInputAccessor> GitRepoImpl::getRawAccessor(const Hash & rev)
{
    auto self = ref<GitRepoImpl>(shared_from_this());
    return make_ref<GitInputAccessor>(self, rev);
}

ref<SourceAccessor> GitRepoImpl::getAccessor(const Hash & rev, bool exportIgnore)
{
    auto self = ref<GitRepoImpl>(shared_from_this());
    ref<GitInputAccessor> rawGitAccessor = getRawAccessor(rev);
    if (exportIgnore) {
        return make_ref<GitExportIgnoreInputAccessor>(self, rawGitAccessor, rev);
    }
    else {
        return rawGitAccessor;
    }
}

ref<SourceAccessor> GitRepoImpl::getAccessor(const WorkdirInfo & wd, bool exportIgnore, MakeNotAllowedError makeNotAllowedError)
{
    auto self = ref<GitRepoImpl>(shared_from_this());
    /* In case of an empty workdir, return an empty in-memory tree. We
       cannot use AllowListInputAccessor because it would return an
       error for the root (and we can't add the root to the allow-list
       since that would allow access to all its children). */
    ref<SourceAccessor> fileAccessor =
        wd.files.empty()
        ? makeEmptySourceAccessor()
        : AllowListInputAccessor::create(
            makeFSInputAccessor(path),
            std::set<CanonPath> { wd.files },
            std::move(makeNotAllowedError)).cast<SourceAccessor>();
    if (exportIgnore)
        return make_ref<GitExportIgnoreInputAccessor>(self, fileAccessor, std::nullopt);
    else
        return fileAccessor;
}

ref<GitFileSystemObjectSink> GitRepoImpl::getFileSystemObjectSink()
{
    return make_ref<GitFileSystemObjectSinkImpl>(ref<GitRepoImpl>(shared_from_this()));
}

std::vector<std::tuple<GitRepoImpl::Submodule, Hash>> GitRepoImpl::getSubmodules(const Hash & rev, bool exportIgnore)
{
    /* Read the .gitmodules file from this revision. */
    CanonPath modulesFile(".gitmodules");

    auto accessor = getAccessor(rev, exportIgnore);
    if (!accessor->pathExists(modulesFile)) return {};

    /* Parse it and get the revision of each submodule. */
    auto configS = accessor->readFile(modulesFile);

    auto [fdTemp, pathTemp] = createTempFile("nix-git-submodules");
    writeFull(fdTemp.get(), configS);

    std::vector<std::tuple<Submodule, Hash>> result;

    auto rawAccessor = getRawAccessor(rev);

    for (auto & submodule : parseSubmodules(pathTemp)) {
        auto rev = rawAccessor->getSubmoduleRev(submodule.path);
        result.push_back({std::move(submodule), rev});
    }

    return result;
}

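/* Open (creating it if necessary) the bare Git repository that Nix uses as a
   content cache for unpacked tarballs. */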
ref<GitRepo> getTarballCache()
{
    static auto repoDir = std::filesystem::path(getCacheDir()) / "nix" / "tarball-cache";

    return GitRepo::openRepo(repoDir, true, true);
}

}