#include "download.hh"
#include "util.hh"
#include "globals.hh"
#include "hash.hh"
#include "store-api.hh"
#include "archive.hh"

#include <curl/curl.h>

#include <chrono>
#include <iostream>
#include <thread>
|
|
|
|
namespace nix {
|
|
|
|
|
|
2015-10-07 18:31:50 +03:00
|
|
|
|
/* Return the current wall-clock time as fractional seconds since the
   Unix epoch.  Uses std::chrono instead of the POSIX gettimeofday(),
   which was only available here through transitive includes; behavior
   (epoch-based double with sub-second resolution) is unchanged. */
double getTime()
{
    using namespace std::chrono;
    return duration_cast<duration<double>>(
        system_clock::now().time_since_epoch()).count();
}
|
|
|
|
|
|
2016-04-14 17:27:48 +03:00
|
|
|
|
/* Resolve "channel:<name>" shorthand URIs to the full nixos.org
   channel tarball URL; any other URI is returned unchanged. */
std::string resolveUri(const std::string & uri)
{
    constexpr size_t prefixLen = 8; // strlen("channel:")
    if (uri.compare(0, prefixLen, "channel:") != 0)
        return uri;
    return "https://nixos.org/channels/" + uri.substr(prefixLen) + "/nixexprs.tar.xz";
}
|
|
|
|
|
|
2016-02-29 19:15:20 +02:00
|
|
|
|
/* Downloader implementation based on libcurl.  One CURL easy handle is
   created in the constructor and reused (after curl_easy_reset) for
   every fetch.  Supports ETag-based cache validation, a stderr
   progress meter, and retry with exponential backoff for transient
   failures. */
struct CurlDownloader : public Downloader
{
    CURL * curl;            // reusable curl easy handle; never null after ctor
    ref<std::string> data;  // accumulates the response body of the current fetch
    // etag/status are parsed from response headers; expectedETag is the
    // caller-supplied cached ETag used for If-None-Match validation.
    string etag, status, expectedETag;

    struct curl_slist * requestHeaders; // extra request headers (If-None-Match)

    bool showProgress; // whether to draw the progress meter on stderr
    // prevProgressTime throttles meter redraws to ~1/s; startTime is the
    // fetch start, used for the KiB/s rate.
    double prevProgressTime{0}, startTime{0};
    // Width of the last progress string printed, so the cursor can be
    // moved back over it with an ANSI escape on the next redraw.
    unsigned int moveBack{1};

    /* curl write callback: append a body chunk to ‘data’.  Returning
       the full chunk size tells curl to continue. */
    size_t writeCallback(void * contents, size_t size, size_t nmemb)
    {
        size_t realSize = size * nmemb;
        data->append((char *) contents, realSize);
        return realSize;
    }

    /* Static trampoline for CURLOPT_WRITEFUNCTION; ‘userp’ is ‘this’. */
    static size_t writeCallbackWrapper(void * contents, size_t size, size_t nmemb, void * userp)
    {
        return ((CurlDownloader *) userp)->writeCallback(contents, size, nmemb);
    }

    /* curl header callback: records the HTTP status from the status
       line and captures the ETag header.  Returning a value different
       from ‘realSize’ (the 0 below) makes curl abort the transfer with
       CURLE_WRITE_ERROR. */
    size_t headerCallback(void * contents, size_t size, size_t nmemb)
    {
        size_t realSize = size * nmemb;
        string line = string((char *) contents, realSize);
        printMsg(lvlVomit, format("got header: %1%") % trim(line));
        if (line.compare(0, 5, "HTTP/") == 0) { // new response starts
            // Reset per-response state (a redirect starts a new response).
            etag = "";
            auto ss = tokenizeString<vector<string>>(line, " ");
            status = ss.size() >= 2 ? ss[1] : "";
        } else {
            auto i = line.find(':');
            if (i != string::npos) {
                string name = trim(string(line, 0, i));
                if (name == "ETag") { // FIXME: case
                    etag = trim(string(line, i + 1));
                    /* Hack to work around a GitHub bug: it sends
                       ETags, but ignores If-None-Match. So if we get
                       the expected ETag on a 200 response, then shut
                       down the connection because we already have the
                       data. */
                    printMsg(lvlDebug, format("got ETag: %1%") % etag);
                    if (etag == expectedETag && status == "200") {
                        printMsg(lvlDebug, format("shutting down on 200 HTTP response with expected ETag"));
                        return 0;
                    }
                }
            }
        }
        return realSize;
    }

    /* Static trampoline for CURLOPT_HEADERFUNCTION; ‘userp’ is ‘this’. */
    static size_t headerCallbackWrapper(void * contents, size_t size, size_t nmemb, void * userp)
    {
        return ((CurlDownloader *) userp)->headerCallback(contents, size, nmemb);
    }

    /* curl progress callback: redraws the stderr meter at most once per
       second.  Returning non-zero (when the user interrupted) makes
       curl abort the transfer. */
    int progressCallback(double dltotal, double dlnow)
    {
        if (showProgress) {
            double now = getTime();
            if (prevProgressTime <= now - 1) {
                string s = (format(" [%1$.0f/%2$.0f KiB, %3$.1f KiB/s]")
                    % (dlnow / 1024.0)
                    % (dltotal / 1024.0)
                    // Avoid division by zero on the very first callback.
                    % (now == startTime ? 0 : dlnow / 1024.0 / (now - startTime))).str();
                // Move the cursor back over the previous meter, overwrite it.
                std::cerr << "\e[" << moveBack << "D" << s;
                moveBack = s.size();
                std::cerr.flush();
                prevProgressTime = now;
            }
        }
        return _isInterrupted;
    }

    /* Static trampoline for CURLOPT_PROGRESSFUNCTION; upload counters
       are ignored. */
    static int progressCallbackWrapper(void * userp, double dltotal, double dlnow, double ultotal, double ulnow)
    {
        return ((CurlDownloader *) userp)->progressCallback(dltotal, dlnow);
    }

    CurlDownloader()
        : data(make_ref<std::string>())
    {
        requestHeaders = 0;

        curl = curl_easy_init();
        if (!curl) throw nix::Error("unable to initialize curl");
    }

    ~CurlDownloader()
    {
        if (curl) curl_easy_cleanup(curl);
        if (requestHeaders) curl_slist_free_all(requestHeaders);
    }

    /* Perform one HTTP(S) request for ‘url’.  Returns true if a fresh
       body was downloaded into ‘data’, false if the server indicated
       the cached copy is still valid (304, or the ETag hack above).
       Throws DownloadError on failure. */
    bool fetch(const string & url, const DownloadOptions & options)
    {
        showProgress =
            options.showProgress == DownloadOptions::yes ||
            (options.showProgress == DownloadOptions::automatic && isatty(STDERR_FILENO));

        // Reset all options: the handle is reused across fetches.
        curl_easy_reset(curl);

        // NOTE(review): curl_easy_setopt is variadic and strictly expects
        // ‘long’ for numeric options; several literals below are plain
        // int (e.g. 1 instead of 1L) — works on common ABIs, but worth
        // normalizing.
        curl_easy_setopt(curl, CURLOPT_FOLLOWLOCATION, 1L);
        curl_easy_setopt(curl, CURLOPT_USERAGENT, ("Nix/" + nixVersion).c_str());
        curl_easy_setopt(curl, CURLOPT_FAILONERROR, 1);

        curl_easy_setopt(curl, CURLOPT_WRITEFUNCTION, writeCallbackWrapper);
        curl_easy_setopt(curl, CURLOPT_WRITEDATA, (void *) this);

        curl_easy_setopt(curl, CURLOPT_HEADERFUNCTION, headerCallbackWrapper);
        curl_easy_setopt(curl, CURLOPT_HEADERDATA, (void *) this);

        curl_easy_setopt(curl, CURLOPT_PROGRESSFUNCTION, progressCallbackWrapper);
        curl_easy_setopt(curl, CURLOPT_PROGRESSDATA, (void *) this);
        curl_easy_setopt(curl, CURLOPT_NOPROGRESS, 0);

        // Don't let curl install signal handlers (we may be multi-threaded).
        curl_easy_setopt(curl, CURLOPT_NOSIGNAL, 1);

        curl_easy_setopt(curl, CURLOPT_URL, url.c_str());

        if (options.verifyTLS)
            curl_easy_setopt(curl, CURLOPT_CAINFO, getEnv("SSL_CERT_FILE", "/etc/ssl/certs/ca-certificates.crt").c_str());
        else {
            curl_easy_setopt(curl, CURLOPT_SSL_VERIFYPEER, 0);
            curl_easy_setopt(curl, CURLOPT_SSL_VERIFYHOST, 0);
        }

        // Fresh buffer per fetch; previous fetch's data may still be
        // referenced by a caller via DownloadResult.
        data = make_ref<std::string>();

        if (requestHeaders) {
            curl_slist_free_all(requestHeaders);
            requestHeaders = 0;
        }

        if (!options.expectedETag.empty()) {
            this->expectedETag = options.expectedETag;
            requestHeaders = curl_slist_append(requestHeaders, ("If-None-Match: " + options.expectedETag).c_str());
        }

        curl_easy_setopt(curl, CURLOPT_HTTPHEADER, requestHeaders);

        if (options.head)
            curl_easy_setopt(curl, CURLOPT_NOBODY, 1);

        if (showProgress) {
            std::cerr << (format("downloading ‘%1%’... ") % url);
            std::cerr.flush();
            startTime = getTime();
        }

        CURLcode res = curl_easy_perform(curl);
        if (showProgress)
            //std::cerr << "\e[" << moveBack << "D\e[K\n";
            std::cerr << "\n";
        checkInterrupt();
        // CURLE_WRITE_ERROR with a matching ETag means headerCallback()
        // deliberately aborted the transfer: the cached copy is valid.
        if (res == CURLE_WRITE_ERROR && etag == options.expectedETag) return false;

        long httpStatus = -1;
        curl_easy_getinfo(curl, CURLINFO_RESPONSE_CODE, &httpStatus);

        if (res != CURLE_OK) {
            // Classify the failure so download() knows whether to retry.
            Error err =
                httpStatus == 404 ? NotFound :
                httpStatus == 403 ? Forbidden :
                (httpStatus == 408 || httpStatus == 500 || httpStatus == 503
                    || httpStatus == 504 || httpStatus == 522 || httpStatus == 524
                    || res == CURLE_COULDNT_RESOLVE_HOST) ? Transient :
                Misc;
            if (res == CURLE_HTTP_RETURNED_ERROR && httpStatus != -1)
                throw DownloadError(err, format("unable to download ‘%s’: HTTP error %d")
                    % url % httpStatus);
            else
                throw DownloadError(err, format("unable to download ‘%s’: %s (%d)")
                    % url % curl_easy_strerror(res) % res);
        }

        if (httpStatus == 304) return false; // Not Modified: use the cached copy

        return true;
    }

    /* Public entry point: fetch ‘url’, retrying transient failures up
       to options.tries times with exponential backoff (25 ms * 2^k). */
    DownloadResult download(string url, const DownloadOptions & options) override
    {
        size_t attempt = 0;

        while (true) {
            try {
                DownloadResult res;
                if (fetch(resolveUri(url), options)) {
                    res.cached = false;
                    res.data = data;
                } else
                    res.cached = true;
                res.etag = etag;
                return res;
            } catch (DownloadError & e) {
                attempt++;
                if (e.error != Transient || attempt >= options.tries) throw;
                auto ms = 25 * (1 << (attempt - 1));
                printMsg(lvlError, format("warning: %s; retrying in %d ms") % e.what() % ms);
                std::this_thread::sleep_for(std::chrono::milliseconds(ms));
            }
        }
    }
};
|
|
|
|
|
|
2016-02-29 19:15:20 +02:00
|
|
|
|
/* Factory for the default Downloader implementation (curl-based). */
ref<Downloader> makeDownloader()
{
    auto downloader = make_ref<CurlDownloader>();
    return downloader;
}
|
|
|
|
|
|
2016-08-15 14:37:11 +03:00
|
|
|
|
/* Download ‘url_’ into the Nix store, caching the result.  A cache of
   url-hash → store-path symlinks plus ".info" metadata files (url,
   ETag, last-checked timestamp) is kept under <cache>/nix/tarballs.
   Within the "tarball-ttl" window the cached store path is reused
   without hitting the network; after that the saved ETag is revalidated.
   If ‘unpack’ is set, the downloaded tarball is additionally unpacked
   (via tar) and the unpacked store path is returned.  If ‘expectedHash’
   is given, a matching pre-existing store path short-circuits the
   download, and a mismatch at the end is an error. */
Path Downloader::downloadCached(ref<Store> store, const string & url_, bool unpack, string name, const Hash & expectedHash)
{
    auto url = resolveUri(url_);

    // Default the store-path name to the last URL component.
    if (name == "") {
        auto p = url.rfind('/');
        if (p != string::npos) name = string(url, p + 1);
    }

    Path expectedStorePath;
    if (expectedHash) {
        // If the fixed-output path already exists, skip everything.
        expectedStorePath = store->makeFixedOutputPath(unpack, expectedHash, name);
        if (store->isValidPath(expectedStorePath))
            return expectedStorePath;
    }

    Path cacheDir = getCacheDir() + "/nix/tarballs";
    createDirs(cacheDir);

    string urlHash = printHash32(hashString(htSHA256, url));

    Path dataFile = cacheDir + "/" + urlHash + ".info";   // url \n etag \n lastChecked \n
    Path fileLink = cacheDir + "/" + urlHash + "-file";   // symlink to the store path

    Path storePath;

    string expectedETag;

    int ttl = settings.get("tarball-ttl", 60 * 60);
    bool skip = false; // true ⇒ cached copy is fresh, no download needed

    if (pathExists(fileLink) && pathExists(dataFile)) {
        storePath = readLink(fileLink);
        // Protect the path from GC while we use it.
        store->addTempRoot(storePath);
        if (store->isValidPath(storePath)) {
            auto ss = tokenizeString<vector<string>>(readFile(dataFile), "\n");
            if (ss.size() >= 3 && ss[0] == url) {
                time_t lastChecked;
                if (string2Int(ss[2], lastChecked) && lastChecked + ttl >= time(0))
                    skip = true; // still within the TTL window
                else if (!ss[1].empty()) {
                    // Stale: revalidate using the saved ETag.
                    printMsg(lvlDebug, format("verifying previous ETag ‘%1%’") % ss[1]);
                    expectedETag = ss[1];
                }
            }
        } else
            storePath = ""; // symlink points to a GC'ed path; redownload
    }

    if (!skip) {

        try {
            DownloadOptions options;
            options.expectedETag = expectedETag;
            auto res = download(url, options);

            if (!res.cached) {
                // Fresh data: add it to the store as a fixed-output path.
                ValidPathInfo info;
                StringSink sink;
                dumpString(*res.data, sink);
                Hash hash = hashString(expectedHash ? expectedHash.type : htSHA256, *res.data);
                info.path = store->makeFixedOutputPath(false, hash, name);
                info.narHash = hashString(htSHA256, *sink.s);
                store->addToStore(info, *sink.s, false, true);
                storePath = info.path;
            }

            assert(!storePath.empty());
            replaceSymlink(storePath, fileLink);

            // Record url/etag/timestamp for future TTL and ETag checks.
            writeFile(dataFile, url + "\n" + res.etag + "\n" + std::to_string(time(0)) + "\n");
        } catch (DownloadError & e) {
            // Best effort: fall back to a stale cached copy if we have one.
            if (storePath.empty()) throw;
            printMsg(lvlError, format("warning: %1%; using cached result") % e.msg());
        }
    }

    if (unpack) {
        // Unpacked results are cached separately, keyed on the tarball's
        // store path.
        Path unpackedLink = cacheDir + "/" + baseNameOf(storePath) + "-unpacked";
        Path unpackedStorePath;
        if (pathExists(unpackedLink)) {
            unpackedStorePath = readLink(unpackedLink);
            store->addTempRoot(unpackedStorePath);
            if (!store->isValidPath(unpackedStorePath))
                unpackedStorePath = "";
        }
        if (unpackedStorePath.empty()) {
            printMsg(lvlInfo, format("unpacking ‘%1%’...") % url);
            Path tmpDir = createTempDir();
            AutoDelete autoDelete(tmpDir, true);
            // FIXME: this requires GNU tar for decompression.
            runProgram("tar", true, {"xf", storePath, "-C", tmpDir, "--strip-components", "1"}, "");
            unpackedStorePath = store->addToStore(name, tmpDir, true, htSHA256, defaultPathFilter, false);
        }
        replaceSymlink(unpackedStorePath, unpackedLink);
        storePath = unpackedStorePath;
    }

    if (expectedStorePath != "" && storePath != expectedStorePath)
        throw nix::Error(format("hash mismatch in file downloaded from ‘%s’") % url);

    return storePath;
}
|
|
|
|
|
|
|
|
|
|
|
2015-05-06 15:54:31 +03:00
|
|
|
|
/* Return true if ‘s’ looks like a URI this module can handle: either
   the "channel:" shorthand or "<scheme>://..." with a whitelisted
   scheme. */
bool isUri(const string & s)
{
    // "channel:" pseudo-URIs are accepted even though they lack "://".
    if (s.compare(0, 8, "channel:") == 0) return true;
    auto sep = s.find("://");
    if (sep == string::npos) return false;
    const string scheme = s.substr(0, sep);
    static const char * allowed[] = {"http", "https", "file", "channel", "git"};
    for (auto a : allowed)
        if (scheme == a) return true;
    return false;
}
|
|
|
|
|
|
|
|
|
|
|
2015-04-09 13:12:50 +03:00
|
|
|
|
}
|