author     Florian Klink <flokli@flokli.de>          2023-02-27T12·03+0100
committer  flokli <flokli@flokli.de>                 2023-03-10T10·58+0000
commit     b29d1ae372bb0794cc8425ced7986b3d059a2be5 (patch)
tree       05ecc2b78c6f005245c4896b1692e0ec4f529a05 /tvix/store/src/import.rs
parent     52a5181ebac94f7b6d5a1acab182d8f7fcc59c8c (diff)
feat(tvix/store): add import::import_path (r/5932)
This imports the contents at a given Path into the tvix store.

It doesn't register the contents at a Path in the store itself, that's up
to the PathInfoService.

Change-Id: I2c493532d65b90f199ddb7dfc90249f5c2957dee
Reviewed-on: https://cl.tvl.fyi/c/depot/+/8159
Reviewed-by: raitobezarius <tvl@lahfa.xyz>
Tested-by: BuildkiteCI
Diffstat (limited to 'tvix/store/src/import.rs')
-rw-r--r--  tvix/store/src/import.rs  248
1 file changed, 248 insertions, 0 deletions
diff --git a/tvix/store/src/import.rs b/tvix/store/src/import.rs
new file mode 100644
index 0000000000..d2abb292fc
--- /dev/null
+++ b/tvix/store/src/import.rs
@@ -0,0 +1,248 @@
+use crate::{proto, BlobWriter};
+use std::{
+    collections::HashMap,
+    fmt::Debug,
+    fs,
+    fs::File,
+    io::BufReader,
+    os::unix::prelude::PermissionsExt,
+    path::{Path, PathBuf},
+};
+use tracing::instrument;
+use walkdir::WalkDir;
+
+use crate::{
+    blobservice::BlobService, chunkservice::ChunkService, directoryservice::DirectoryService,
+};
+
+#[derive(Debug, thiserror::Error)]
+pub enum Error {
+    #[error("failed to upload directory at {0}: {1}")]
+    UploadDirectoryError(PathBuf, crate::Error),
+
+    #[error("invalid encoding encountered for entry {0:?}")]
+    InvalidEncoding(PathBuf),
+
+    #[error("unable to stat {0}: {1}")]
+    UnableToStat(PathBuf, std::io::Error),
+
+    #[error("unable to open {0}: {1}")]
+    UnableToOpen(PathBuf, std::io::Error),
+
+    #[error("unable to read {0}: {1}")]
+    UnableToRead(PathBuf, std::io::Error),
+}
+
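+// Converts from the crate-wide [crate::Error]. Both variants are treated as
+// fatal during an import and turned into panics here.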
+impl From<super::Error> for Error {
+    fn from(value: super::Error) -> Self {
+        match value {
+            crate::Error::InvalidRequest(_) => panic!("tvix bug: unexpected InvalidRequest"),
+            crate::Error::StorageError(_) => panic!("tvix bug: unexpected StorageError"),
+        }
+    }
+}
+
+// This processes a given [walkdir::DirEntry] and returns a
+// [proto::node::Node], depending on the type of the entry.
+//
+// If the entry is a file, its contents are uploaded.
+// If the entry is a directory, the so-far-assembled [proto::Directory] object
+// for that path must be passed in via `maybe_directory`, and is uploaded to
+// the store as well.
+//
+// It assumes entries are visited in "contents first" order, so it is only
+// called for a directory once all of its children have been processed, and it
+// relies on the caller to add the returned nodes to the Directory objects it
+// assembles for the parent paths.
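+//
+// As an illustrative example (hypothetical paths): for a tree `a/` containing
+// `a/b` and `a/c/d`, one possible contents-first order is `a/b`, `a/c/d`,
+// `a/c`, `a/`, so by the time `a/` is processed, the nodes for `a/b` and
+// `a/c` have already been returned and added to its Directory.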
+#[instrument(skip_all, fields(entry.file_type=?&entry.file_type(),entry.path=?entry.path()))]
+fn process_entry<BS: BlobService, CS: ChunkService, DS: DirectoryService>(
+    blob_service: &mut BS,
+    chunk_service: &mut CS,
+    directory_service: &mut DS,
+    entry: &walkdir::DirEntry,
+    maybe_directory: Option<proto::Directory>,
+) -> Result<proto::node::Node, Error> {
+    let file_type = entry.file_type();
+
+    let entry_path: PathBuf = entry.path().to_path_buf();
+
+    if file_type.is_dir() {
+        let directory = maybe_directory
+            .expect("tvix bug: must be called with some directory in the case of directory");
+        let directory_digest = directory.digest();
+        let directory_size = directory.size();
+
+        // upload this directory
+        directory_service
+            .put(directory)
+            .map_err(|e| Error::UploadDirectoryError(entry.path().to_path_buf(), e))?;
+
+        return Ok(proto::node::Node::Directory(proto::DirectoryNode {
+            name: entry
+                .file_name()
+                .to_str()
+                .ok_or_else(|| Error::InvalidEncoding(entry.path().to_path_buf()))?
+                .to_string(),
+            digest: directory_digest,
+            size: directory_size,
+        }));
+    }
+
+    if file_type.is_symlink() {
+        let target = std::fs::read_link(&entry_path)
+            .map_err(|e| Error::UnableToStat(entry_path.clone(), e))?;
+
+        return Ok(proto::node::Node::Symlink(proto::SymlinkNode {
+            name: entry
+                .file_name()
+                .to_str()
+                .ok_or_else(|| Error::InvalidEncoding(entry.path().to_path_buf()))?
+                .to_string(),
+            target: target
+                .to_str()
+                .ok_or_else(|| Error::InvalidEncoding(entry.path().to_path_buf()))?
+                .to_string(),
+        }));
+    }
+
+    if file_type.is_file() {
+        let metadata = entry
+            .metadata()
+            .map_err(|e| Error::UnableToStat(entry_path.clone(), e.into()))?;
+
+        // hash the file contents, upload chunks if not there yet
+        let (blob_digest, blob_meta) = {
+            let mut blob_writer = BlobWriter::new(chunk_service);
+
+            let file = File::open(entry_path.clone())
+                .map_err(|e| Error::UnableToOpen(entry_path.clone(), e))?;
+
+            let mut file_reader = BufReader::new(file);
+
+            std::io::copy(&mut file_reader, &mut blob_writer)
+                .map_err(|e| Error::UnableToRead(entry_path, e))?;
+
+            // TODO: handle errors
+            blob_writer.finalize().unwrap()
+        };
+
+        // upload blobmeta if not there yet
+        if blob_service
+            .stat(&proto::StatBlobRequest {
+                digest: blob_digest.to_vec(),
+                include_chunks: false,
+                include_bao: false,
+            })?
+            .is_none()
+        {
+            // upload blobmeta
+            blob_service.put(&blob_digest, blob_meta)?;
+        }
+
+        return Ok(proto::node::Node::File(proto::FileNode {
+            name: entry
+                .file_name()
+                .to_str()
+                .ok_or_else(|| Error::InvalidEncoding(entry.path().to_path_buf()))?
+                .to_string(),
+            digest: blob_digest,
+            size: metadata.len() as u32,
+            // If the file is executable by the user (mode bit 0o100 is set),
+            // it'll become executable. This matches nix's dump() function
+            // behaviour.
+            executable: metadata.permissions().mode() & 0o100 != 0,
+        }));
+    }
+    todo!("handle other types")
+}
+
+/// Imports the contents at a given Path into the tvix store.
+///
+/// It doesn't register the contents at a Path in the store itself, that's up
+/// to the PathInfoService.
+///
+/// It returns the root node, or an error.
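+///
+/// A minimal usage sketch (not compiled as a doctest; `blob_svc`, `chunk_svc`
+/// and `dir_svc` stand in for some implementations of the respective service
+/// traits):
+///
+/// ```ignore
+/// let root_node = import_path(&mut blob_svc, &mut chunk_svc, &mut dir_svc, "/some/path")?;
+/// ```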
+#[instrument(skip(blob_service, chunk_service, directory_service), fields(path=?p))]
+pub fn import_path<
+    BS: BlobService,
+    CS: ChunkService,
+    DS: DirectoryService,
+    P: AsRef<Path> + Debug,
+>(
+    blob_service: &mut BS,
+    chunk_service: &mut CS,
+    directory_service: &mut DS,
+    p: P,
+) -> Result<proto::node::Node, Error> {
+    // Probe if the path points to a symlink. If it does, we process it manually,
+    // due to https://github.com/BurntSushi/walkdir/issues/175.
+    let symlink_metadata = fs::symlink_metadata(p.as_ref())
+        .map_err(|e| Error::UnableToStat(p.as_ref().to_path_buf(), e))?;
+    if symlink_metadata.is_symlink() {
+        let target = std::fs::read_link(p.as_ref())
+            .map_err(|e| Error::UnableToStat(p.as_ref().to_path_buf(), e))?;
+        return Ok(proto::node::Node::Symlink(proto::SymlinkNode {
+            name: p
+                .as_ref()
+                .file_name()
+                .unwrap_or_default()
+                .to_str()
+                .ok_or_else(|| Error::InvalidEncoding(p.as_ref().to_path_buf()))?
+                .to_string(),
+            target: target
+                .to_str()
+                .ok_or_else(|| Error::InvalidEncoding(p.as_ref().to_path_buf()))?
+                .to_string(),
+        }));
+    }
+
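+    // Maps each directory path seen during the walk to its so-far-assembled
+    // [proto::Directory]. Child nodes are recorded here as they are returned,
+    // and the finished Directory is handed to process_entry once the
+    // directory entry itself is visited.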
+    let mut directories: HashMap<PathBuf, proto::Directory> = HashMap::default();
+
+    // TODO: make sure we traverse in sorted order, or insert to parent_directory in sorted order at least.
+    for entry in WalkDir::new(p).follow_links(false).contents_first(true) {
+        let entry = entry.unwrap();
+
+        // process_entry wants an Option<Directory> in case the entry points to a directory.
+        // Make sure to provide it.
+        let maybe_directory: Option<proto::Directory> = {
+            if entry.file_type().is_dir() {
+                Some(
+                    directories
+                        .entry(entry.path().to_path_buf())
+                        .or_default()
+                        .clone(),
+                )
+            } else {
+                None
+            }
+        };
+
+        let node = process_entry(
+            blob_service,
+            chunk_service,
+            directory_service,
+            &entry,
+            maybe_directory,
+        )?;
+
+        if entry.depth() == 0 {
+            return Ok(node);
+        } else {
+            // calculate the parent path, and make sure we register the node there.
+            // NOTE: entry.depth() > 0
+            let parent_path = entry.path().parent().unwrap().to_path_buf();
+
+            // record node in parent directory, creating a new [proto::Directory] if not there yet.
+            let parent_directory = directories.entry(parent_path).or_default();
+            match node {
+                proto::node::Node::Directory(e) => parent_directory.directories.push(e),
+                proto::node::Node::File(e) => parent_directory.files.push(e),
+                proto::node::Node::Symlink(e) => parent_directory.symlinks.push(e),
+            }
+        }
+    }
+    // unreachable: the loop above always returns once it reaches the root
+    // entry (depth 0), and bails out earlier if the root doesn't exist.
+    unreachable!("tvix bug")
+}