Skip to content

Commit

Permalink
Drop remaining uses of external "tar"
Browse files Browse the repository at this point in the history
Also, fetchGit now runs in O(1) memory since we pipe the output of
'git archive' directly into unpackTarball() (rather than first reading
it all into memory).
  • Loading branch information
edolstra committed Sep 11, 2019
1 parent d6d7e86 commit 5351f1b
Show file tree
Hide file tree
Showing 7 changed files with 43 additions and 24 deletions.
11 changes: 7 additions & 4 deletions src/libexpr/primops/fetchGit.cc
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
#include "store-api.hh"
#include "pathlocks.hh"
#include "hash.hh"
#include "tarfile.hh"

#include <sys/time.h>

Expand Down Expand Up @@ -166,14 +167,16 @@ GitInfo exportGit(ref<Store> store, const std::string & uri,
if (e.errNo != ENOENT) throw;
}

// FIXME: should pipe this, or find some better way to extract a
// revision.
auto tar = runProgram("git", true, { "-C", cacheDir, "archive", gitInfo.rev });
auto source = sinkToSource([&](Sink & sink) {
RunOptions gitOptions("git", { "-C", cacheDir, "archive", gitInfo.rev });
gitOptions.standardOut = &sink;
runProgram2(gitOptions);
});

Path tmpDir = createTempDir();
AutoDelete delTmpDir(tmpDir, true);

runProgram("tar", true, { "x", "-C", tmpDir }, tar);
unpackTarfile(*source, tmpDir);

gitInfo.storePath = store->addToStore(name, tmpDir);

Expand Down
12 changes: 1 addition & 11 deletions src/libstore/builtins/unpack-channel.cc
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
#include "builtins.hh"
#include "compression.hh"
#include "tarfile.hh"

namespace nix {
Expand All @@ -18,16 +17,7 @@ void builtinUnpackChannel(const BasicDerivation & drv)

createDirs(out);

auto source = sinkToSource([&](Sink & sink) {
auto decompressor =
hasSuffix(src, ".bz2") ? makeDecompressionSink("bzip2", sink) :
hasSuffix(src, ".xz") ? makeDecompressionSink("xz", sink) :
makeDecompressionSink("none", sink);
readFile(src, *decompressor);
decompressor->finish();
});

unpackTarfile(*source, out);
unpackTarfile(src, out);

auto entries = readDirectory(out);
if (entries.size() != 1)
Expand Down
12 changes: 8 additions & 4 deletions src/libstore/download.cc
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@
#include "compression.hh"
#include "pathlocks.hh"
#include "finally.hh"
#include "tarfile.hh"

#ifdef ENABLE_S3
#include <aws/core/client/ClientConfiguration.h>
Expand Down Expand Up @@ -898,12 +899,15 @@ CachedDownloadResult Downloader::downloadCached(
unpackedStorePath = "";
}
if (unpackedStorePath.empty()) {
printInfo(format("unpacking '%1%'...") % url);
printInfo("unpacking '%s'...", url);
Path tmpDir = createTempDir();
AutoDelete autoDelete(tmpDir, true);
// FIXME: this requires GNU tar for decompression.
runProgram("tar", true, {"xf", store->toRealPath(storePath), "-C", tmpDir, "--strip-components", "1"});
unpackedStorePath = store->addToStore(name, tmpDir, true, htSHA256, defaultPathFilter, NoRepair);
unpackTarfile(store->toRealPath(storePath), tmpDir, baseNameOf(url));
auto members = readDirectory(tmpDir);
if (members.size() != 1)
throw nix::Error("tarball '%s' contains an unexpected number of top-level files", url);
auto topDir = tmpDir + "/" + members.begin()->name;
unpackedStorePath = store->addToStore(name, topDir, true, htSHA256, defaultPathFilter, NoRepair);
}
replaceSymlink(unpackedStorePath, unpackedLink);
storePath = unpackedStorePath;
Expand Down
1 change: 0 additions & 1 deletion src/libutil/serialise.hh
Original file line number Diff line number Diff line change
Expand Up @@ -77,7 +77,6 @@ struct BufferedSource : Source

size_t read(unsigned char * data, size_t len) override;


bool hasData();

protected:
Expand Down
22 changes: 21 additions & 1 deletion src/libutil/tarfile.cc
Original file line number Diff line number Diff line change
@@ -1,14 +1,34 @@
#include "rust-ffi.hh"
#include "compression.hh"

extern "C" {
rust::CBox2<rust::Result<std::tuple<>>> unpack_tarfile(rust::Source source, rust::StringSlice dest_dir);
}

namespace nix {

void unpackTarfile(Source & source, Path destDir)
/* Unpack an (uncompressed) tar stream read from 'source' into the
   directory 'destDir'. Extraction is delegated to the Rust-side
   unpack_tarfile() FFI helper declared above; unwrap() presumably
   converts a Rust-side error into a C++ exception — verify in
   rust-ffi.hh. Streaming from 'source' keeps memory use O(1). */
void unpackTarfile(Source & source, const Path & destDir)
{
unpack_tarfile(source, destDir).use()->unwrap();
}

/* Unpack the (possibly compressed) tarball at 'tarFile' into
   'destDir'. 'baseName' is the file name used to sniff the
   compression type from its extension (".bz2" / ".xz", anything
   else is treated as uncompressed); it defaults to the base name
   of 'tarFile' itself — callers pass it explicitly when the path
   on disk does not carry the original extension (e.g. a store
   path downloaded from a URL). */
void unpackTarfile(const Path & tarFile, const Path & destDir,
std::optional<std::string> baseName)
{
if (!baseName) baseName = baseNameOf(tarFile);

/* Decompress lazily into the tar unpacker: sinkToSource turns the
   push-style decompression sink into a pull-style Source, so the
   whole file is never held in memory at once. */
auto source = sinkToSource([&](Sink & sink) {
// FIXME: look at first few bytes to determine compression type.
auto decompressor =
// FIXME: add .gz support
hasSuffix(*baseName, ".bz2") ? makeDecompressionSink("bzip2", sink) :
hasSuffix(*baseName, ".xz") ? makeDecompressionSink("xz", sink) :
makeDecompressionSink("none", sink);
readFile(tarFile, *decompressor);
// Flush any data still buffered in the decompressor.
decompressor->finish();
});

// Hand the decompressed stream to the Source-based overload.
unpackTarfile(*source, destDir);
}

}
5 changes: 4 additions & 1 deletion src/libutil/tarfile.hh
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,9 @@

namespace nix {

void unpackTarfile(Source & source, Path destDir);
void unpackTarfile(Source & source, const Path & destDir);

void unpackTarfile(const Path & tarFile, const Path & destDir,
std::optional<std::string> baseName = {});

}
4 changes: 2 additions & 2 deletions src/nix-prefetch-url/nix-prefetch-url.cc
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
#include "legacy.hh"
#include "finally.hh"
#include "progress-bar.hh"
#include "tarfile.hh"

#include <iostream>

Expand Down Expand Up @@ -192,8 +193,7 @@ static int _main(int argc, char * * argv)
if (hasSuffix(baseNameOf(uri), ".zip"))
runProgram("unzip", true, {"-qq", tmpFile, "-d", unpacked});
else
// FIXME: this requires GNU tar for decompression.
runProgram("tar", true, {"xf", tmpFile, "-C", unpacked});
unpackTarfile(tmpFile, unpacked, baseNameOf(uri));

/* If the archive unpacks to a single file/directory, then use
that as the top-level. */
Expand Down

0 comments on commit 5351f1b

Please sign in to comment.