diff --git a/.clippy.toml b/.clippy.toml
index 39a1dcb0..c8c25d34 100644
--- a/.clippy.toml
+++ b/.clippy.toml
@@ -1,15 +1,15 @@
excessive-nesting-threshold = 4
too-many-arguments-threshold = 10
-allowed-prefixes = ["..", "GPU", "Orca", "Local"]
-min-ident-chars-threshold = 2
allowed-idents-below-min-chars = ["..", "k", "f", "re", "id", "Ok", "'_"]
allowed-duplicate-crates = [
- "hashbrown",
- "indexmap",
- "windows-sys",
- "thiserror",
- "thiserror-impl",
"bitflags",
+ "clap",
+ "clap_derive",
+ "clap_lex",
+ "hashbrown",
"heck",
+ "indexmap",
+ "strsim",
"syn",
+ "windows-sys",
]
diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile
index d98b6b5d..6a0d22a1 100644
--- a/.devcontainer/Dockerfile
+++ b/.devcontainer/Dockerfile
@@ -20,6 +20,7 @@ RUN \
echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers && \
echo '. /etc/bash_completion' >> /home/vscode/.bashrc && \
echo 'export PS1="\[\e[32;1m\]\u\[\e[m\]@\[\e[34;1m\]\H\[\e[m\]:\[\e[33;1m\]\w\[\e[m\]$ "' >> /home/vscode/.bashrc && \
+ chown vscode:nogroup /home/vscode/.bashrc && \
apt-get clean
USER vscode
@@ -37,4 +38,12 @@ RUN \
cargo install cargo-modules && \
# expand rust macros (useful in debugging)
cargo install cargo-expand
+RUN \
+ # install python manager
+ curl -LsSf https://astral.sh/uv/install.sh | sh && \
+ uv venv -p 3.9 ~/.local/share/base && \
+ # pip package based on C lib/client
+ uv pip install cffi maturin ipykernel -p ~/.local/share/base && \
+ echo '. ~/.local/share/base/bin/activate' >> ~/.bashrc
+ENV VIRTUAL_ENV=/home/vscode/.local/share/base
CMD ["bash", "-c", "sudo rm /var/run/docker.pid; sudo dockerd"]
diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json
index e5f5a54d..95a1a1ae 100644
--- a/.devcontainer/devcontainer.json
+++ b/.devcontainer/devcontainer.json
@@ -1,7 +1,8 @@
{
"name": "Development",
"build": {
- "dockerfile": "Dockerfile"
+ "dockerfile": "Dockerfile",
+ "context": ".."
},
"overrideCommand": false,
"workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
@@ -33,7 +34,10 @@
"eamodio.gitlens", // Git explorer in VSCode
"streetsidesoftware.code-spell-checker", // Catch spelling errors in docs
"GitHub.copilot-chat", // GitHub Copilot AI assistant
- "tintinweb.graphviz-interactive-preview" // Graphviz DOT preview
+ "tintinweb.graphviz-interactive-preview", // Graphviz DOT preview
+ "ms-python.python", // Python IDE
+ "ms-toolsai.jupyter", // Jupyter IDE
+ "ms-python.black-formatter" // Python code formatter
]
}
}
diff --git a/.devcontainer/gpu/Dockerfile b/.devcontainer/gpu/Dockerfile
index dddb1829..f375d579 100644
--- a/.devcontainer/gpu/Dockerfile
+++ b/.devcontainer/gpu/Dockerfile
@@ -38,4 +38,12 @@ RUN apt-get update && curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs
apt-get clean
ENV PATH=${PATH}:/root/.local/bin
+RUN \
+ # install python manager
+ curl -LsSf https://astral.sh/uv/install.sh | sh && \
+ uv venv -p 3.9 ~/.local/share/base && \
+ # pip package based on C lib/client
+ uv pip install cffi maturin ipykernel -p ~/.local/share/base && \
+ echo '. ~/.local/share/base/bin/activate' >> ~/.bashrc
+ENV VIRTUAL_ENV=/root/.local/share/base
CMD ["bash", "-c", "sudo rm /var/run/docker.pid; sudo dockerd"]
diff --git a/.devcontainer/gpu/devcontainer.json b/.devcontainer/gpu/devcontainer.json
index 137b69dc..ebbaba51 100644
--- a/.devcontainer/gpu/devcontainer.json
+++ b/.devcontainer/gpu/devcontainer.json
@@ -1,7 +1,8 @@
{
"name": "Development (GPU)",
"build": {
- "dockerfile": "Dockerfile"
+ "dockerfile": "Dockerfile",
+ "context": "../.."
},
"overrideCommand": false,
"workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
@@ -32,7 +33,12 @@
"ms-vscode.hexeditor", // Binary preview in HEX
"tamasfe.even-better-toml", // *.toml language support
"eamodio.gitlens", // Git explorer in VSCode
- "streetsidesoftware.code-spell-checker" // Catch spelling errors in docs
+ "streetsidesoftware.code-spell-checker", // Catch spelling errors in docs
+ "GitHub.copilot-chat", // GitHub Copilot AI assistant
+ "tintinweb.graphviz-interactive-preview", // Graphviz DOT preview
+ "ms-python.python", // Python IDE
+ "ms-toolsai.jupyter", // Jupyter IDE
+ "ms-python.black-formatter" // Python code formatter
]
}
},
diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 00000000..a72180d7
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,4 @@
+docs/
+target/
+.dockerignore
+README.md
diff --git a/.github/workflows/docs.yaml b/.github/workflows/docs.yaml
index 9a3e2f39..e885bc3b 100644
--- a/.github/workflows/docs.yaml
+++ b/.github/workflows/docs.yaml
@@ -21,18 +21,18 @@ jobs:
token: ${{ steps.generate_app_token.outputs.token }}
- name: Generate crate diagram
run: |
- mkdir -p docs
+ mkdir -p docs/images
set -o pipefail
cargo modules dependencies \
--no-uses --no-fns \
--focus-on "orcapod::uniffi::model::{Pod}" \
--layout dot | \
- dot -T svg > docs/crate_diagram.svg
+ dot -T svg > docs/images/crate_diagram.svg
- name: Sync GitHub
run: |
git config user.name github-actions
git config user.email github-actions@github.com
- git add docs/crate_diagram.svg
+ git add docs/images/crate_diagram.svg
git commit -m "Update crate diagram." || echo "Diagram unchanged, skipping commit."
git push
api:
diff --git a/.vscode/launch.json b/.vscode/launch.json
index 6d832068..5cf66bf2 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -94,5 +94,20 @@
"args": [],
"cwd": "${workspaceFolder}",
},
+ {
+ "name": "Python: Debug File",
+ "type": "debugpy",
+ "request": "launch",
+ "console": "integratedTerminal",
+ "justMyCode": false,
+ "preLaunchTask": "package_orcapod_python",
+ "env": {
+ "RUST_BACKTRACE": "1"
+ },
+ "program": "tests/extra/python/smoke_test.py",
+ "args": [
+ "./tests/.tmp"
+ ]
+ }
]
}
diff --git a/.vscode/settings.json b/.vscode/settings.json
index ba6fd8f2..48efb42c 100644
--- a/.vscode/settings.json
+++ b/.vscode/settings.json
@@ -2,7 +2,6 @@
"editor.formatOnPaste": false,
"editor.formatOnSave": true,
"files.insertFinalNewline": true,
- "notebook.formatOnSave.enabled": true,
"editor.rulers": [
100
],
@@ -18,4 +17,8 @@
"gitlens.showWelcomeOnInstall": false,
"gitlens.showWhatsNewAfterUpgrades": false,
"lldb.consoleMode": "evaluate",
+ "python.defaultInterpreterPath": "~/.local/share/base/bin/python3",
+ "python.terminal.activateEnvironment": false,
+ "notebook.formatOnSave.enabled": true,
+ "notebook.output.scrolling": true
}
diff --git a/.vscode/tasks.json b/.vscode/tasks.json
new file mode 100644
index 00000000..c1cdb2c9
--- /dev/null
+++ b/.vscode/tasks.json
@@ -0,0 +1,10 @@
+{
+ "version": "2.0.0",
+ "tasks": [
+ {
+ "label": "package_orcapod_python",
+ "type": "shell",
+ "command": ". ~/.local/share/base/bin/activate && maturin develop --uv"
+ }
+ ]
+}
diff --git a/Cargo.toml b/Cargo.toml
index 3a3c9e94..95d06535 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -23,26 +23,57 @@ categories = [
license = "MIT license"
edition = "2024"
+[lib]
+crate-type = ["rlib", "cdylib"]
+
[dependencies]
-bollard = "0.17.1" # docker API in orchestrator
-chrono = "0.4.39" # datetime utilities
-colored = "2.1.0" # font colors in printed strings
-futures-util = "0.3.31" # chaining async calls and processing stream data in local docker orchestrator
-glob = "0.3.1" # recursive walk of filesystem in filestore
-heck = "0.5.0" # strings to snake_case
-names = "0.14.0" # random name generator
-regex = "1.11.0" # complex pattern matching in strings
-serde = { version = "1.0.210", features = ["derive"] } # serialization/deserialization to/from filestore
-serde_json = "1.0.137" # JSON in sharing memory with local docker orchestrator
-serde_yaml = "0.9.34" # YAML in filestore
-sha2 = "0.10.8" # checksums based on SHA256
-snafu = { version = "0.8.5", features = ["futures"] } # library error handling API
-tokio = { version = "1.41.0", features = ["full"] } # a runtime for async applications
-tokio-util = "0.7.13" # utilities for async calls
+# make async fn in traits work with dyn traits
+async-trait = "0.1.88"
+# docker API in orchestrator
+bollard = "0.17.1"
+# datetime utilities
+chrono = "0.4.39"
+# font colors in printed strings
+colored = "2.1.0"
+# derive utilities for new types
+derive_more = { version = "2.0.1", features = ["display"] }
+# chaining async calls and processing stream data in local docker orchestrator
+futures-util = "0.3.31"
+# auto derive getter access methods on structs
+getset = { version = "0.1.5", git = "https://github.com/guzman-raphael/getset.git", branch = "impl_attrs" }
+# recursive walk of filesystem in filestore
+glob = "0.3.1"
+# strings to snake_case
+heck = "0.5.0"
+# random name generator
+names = "0.14.0"
+# complex pattern matching in strings
+regex = "1.11.0"
+# serialization/deserialization to/from filestore
+serde = { version = "1.0.210", features = ["derive"] }
+# JSON in sharing memory with local docker orchestrator
+serde_json = "1.0.137"
+# YAML in filestore
+serde_yaml = "0.9.34"
+# checksums based on SHA256
+sha2 = "0.10.8"
+# library error handling API
+snafu = { version = "0.8.5", features = ["futures"] }
+# a runtime for async applications
+tokio = { version = "1.41.0", features = ["full"] }
+# utilities for async calls
+tokio-util = "0.7.13"
+# automated CFFI + bindings in other languages
+uniffi = { version = "0.29.1", features = ["cli"] }
+
+[[bin]]
+name = "uniffi-bindgen"
[dev-dependencies]
-indoc = "2.0.5" # pretty multiline strings
-tempfile = "3.13.0" # creating temp directories
+# pretty multiline strings
+indoc = "2.0.5"
+# creating temp directories
+tempfile = "3.13.0"
[lints.rust]
non_ascii_idents = "deny"
@@ -81,6 +112,7 @@ missing_asserts_for_indexing = { level = "allow", priority = 127 } # missing
missing_docs_in_private_items = { level = "allow", priority = 127 } # missing docs on private ok
missing_inline_in_public_items = { level = "allow", priority = 127 } # let rust compiler determine best inline logic
missing_trait_methods = { level = "allow", priority = 127 } # allow in favor of rustc `implement the missing item`
+module_name_repetitions = { level = "allow", priority = 127 } # allow use of module name in type names
multiple_inherent_impl = { level = "allow", priority = 127 } # required in best practice to limit exposure over UniFFI
must_use_candidate = { level = "allow", priority = 127 } # omitting #[must_use] ok
mod_module_files = { level = "allow", priority = 127 } # mod directories ok
diff --git a/README.md b/README.md
index ec7cc17c..e290980d 100644
--- a/README.md
+++ b/README.md
@@ -4,7 +4,7 @@
# orcapod
-
+
## Tests
@@ -23,9 +23,9 @@ cargo llvm-cov --ignore-filename-regex "bin/.*|lib\.rs" --cobertura --output-pat
```bash
cargo doc --no-deps # gen api docs (target/doc/orcapod/index.html)
-cargo modules dependencies --no-uses --no-fns --focus-on "orcapod::uniffi::model::{Pod}" --layout dot > docs/crate_diagram.dot # orcapod diagram as DOT
-cargo modules dependencies --no-uses --no-fns --focus-on "orcapod::uniffi::model::{Pod}" --layout dot | dot -T png > docs/crate_diagram.png # orcapod diagram as PNG
-cargo modules dependencies --no-uses --no-fns --focus-on "orcapod::uniffi::model::{Pod}" --layout dot | dot -T svg > docs/crate_diagram.svg # orcapod diagram as SVG
+cargo modules dependencies --no-uses --no-fns --focus-on "orcapod::uniffi::model::{Pod}" --layout dot > docs/images/crate_diagram.dot # orcapod diagram as DOT
+cargo modules dependencies --no-uses --no-fns --focus-on "orcapod::uniffi::model::{Pod}" --layout dot | dot -T png > docs/images/crate_diagram.png # orcapod diagram as PNG
+cargo modules dependencies --no-uses --no-fns --focus-on "orcapod::uniffi::model::{Pod}" --layout dot | dot -T svg > docs/images/crate_diagram.svg # orcapod diagram as SVG
```
## Project Management
diff --git a/cspell.json b/cspell.json
index 483caed0..def9f11c 100644
--- a/cspell.json
+++ b/cspell.json
@@ -61,6 +61,16 @@
"tobytes",
"bitflags",
"peaceiris",
- "tintinweb"
+ "tintinweb",
+ "rlib",
+ "cdylib",
+ "toolsai",
+ "bindgen",
+ "nogroup",
+ "venv",
+ "maturin",
+ "ipykernel",
+ "getset",
+ "strsim"
]
}
diff --git a/docs/crate_diagram.svg b/docs/crate_diagram.svg
deleted file mode 100644
index 929e8ba2..00000000
--- a/docs/crate_diagram.svg
+++ /dev/null
@@ -1,292 +0,0 @@
-
-
-
-
-
diff --git a/docs/examples/install_from_source/Dockerfile b/docs/examples/install_from_source/Dockerfile
new file mode 100644
index 00000000..cbd6b6fe
--- /dev/null
+++ b/docs/examples/install_from_source/Dockerfile
@@ -0,0 +1,21 @@
+FROM python:slim
+SHELL ["bash", "-lc"]
+
+## install Rust + Git
+RUN \
+ apt update && \
+ apt install curl gcc git -y && \
+ curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | bash -s -- -y && \
+ echo ". ~/.cargo/env" >> ~/.bashrc
+
+## install remote
+RUN \
+ pip install git+https://github.com/walkerlab/orcapod@dev
+
+## install locally
+# COPY . /tmp/orcapod
+# RUN \
+# pip install /tmp/orcapod && \
+# rm -R /tmp/orcapod
+
+WORKDIR /root
diff --git a/docs/examples/install_from_source/docker-compose.yaml b/docs/examples/install_from_source/docker-compose.yaml
new file mode 100644
index 00000000..e16246fa
--- /dev/null
+++ b/docs/examples/install_from_source/docker-compose.yaml
@@ -0,0 +1,26 @@
+# docker compose down && docker compose up --build
+services:
+ orcapod_python:
+ build:
+ context: ../../..
+ dockerfile: ./docs/examples/install_from_source/Dockerfile
+ command:
+ - bash
+ - -lc
+ - |
+ set -e
+ echo "Install successful"
+
+ # Try importing
+ python - <=1.0,<2.0"]
+build-backend = "maturin"
+
+[tool.maturin]
+features = ["uniffi/cli"]
diff --git a/src/bin/uniffi-bindgen.rs b/src/bin/uniffi-bindgen.rs
new file mode 100644
index 00000000..342d0fb1
--- /dev/null
+++ b/src/bin/uniffi-bindgen.rs
@@ -0,0 +1,6 @@
+//! `uniffi` CLI to generate client bindings e.g. for Python.
+use uniffi;
+
+fn main() {
+ uniffi::uniffi_bindgen_main();
+}
diff --git a/src/core/error.rs b/src/core/error.rs
index ea324f35..0dd158e1 100644
--- a/src/core/error.rs
+++ b/src/core/error.rs
@@ -12,50 +12,67 @@ use std::{
impl From<BollardError> for OrcaError {
fn from(error: BollardError) -> Self {
- Self(Kind::BollardError {
- source: error,
- backtrace: Some(Backtrace::capture()),
- })
+ Self {
+ kind: Kind::BollardError {
+ source: error,
+ backtrace: Some(Backtrace::capture()),
+ },
+ }
}
}
impl From<glob::PatternError> for OrcaError {
fn from(error: glob::PatternError) -> Self {
- Self(Kind::GlobPatternError {
- source: error,
- backtrace: Some(Backtrace::capture()),
- })
+ Self {
+ kind: Kind::GlobPatternError {
+ source: error,
+ backtrace: Some(Backtrace::capture()),
+ },
+ }
}
}
impl From<io::Error> for OrcaError {
fn from(error: io::Error) -> Self {
- Self(Kind::IoError {
- source: error,
- backtrace: Some(Backtrace::capture()),
- })
+ Self {
+ kind: Kind::IoError {
+ source: error,
+ backtrace: Some(Backtrace::capture()),
+ },
+ }
}
}
impl From<path::StripPrefixError> for OrcaError {
fn from(error: path::StripPrefixError) -> Self {
- Self(Kind::PathPrefixError {
- source: error,
- backtrace: Some(Backtrace::capture()),
- })
+ Self {
+ kind: Kind::PathPrefixError {
+ source: error,
+ backtrace: Some(Backtrace::capture()),
+ },
+ }
}
}
impl From<serde_json::Error> for OrcaError {
fn from(error: serde_json::Error) -> Self {
- Self(Kind::SerdeJsonError {
- source: error,
- backtrace: Some(Backtrace::capture()),
- })
+ Self {
+ kind: Kind::SerdeJsonError {
+ source: error,
+ backtrace: Some(Backtrace::capture()),
+ },
+ }
}
}
impl From<serde_yaml::Error> for OrcaError {
fn from(error: serde_yaml::Error) -> Self {
- Self(Kind::SerdeYamlError {
- source: error,
- backtrace: Some(Backtrace::capture()),
- })
+ Self {
+ kind: Kind::SerdeYamlError {
+ source: error,
+ backtrace: Some(Backtrace::capture()),
+ },
+ }
+ }
+}
+impl From<Kind> for OrcaError {
+ fn from(error: Kind) -> Self {
+ Self { kind: error }
}
}
fn format_stack(backtrace: Option<&Backtrace>) -> String {
@@ -71,7 +88,7 @@ fn format_stack(backtrace: Option<&Backtrace>) -> String {
}
impl fmt::Debug for OrcaError {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
- match &self.0 {
+ match &self.kind {
Kind::EmptyResponseWhenLoadingContainerAltImage { backtrace, .. }
| Kind::GeneratedNamesOverflow { backtrace, .. }
| Kind::InvalidFilepath { backtrace, .. }
@@ -88,7 +105,7 @@ impl fmt::Debug for OrcaError {
| Kind::PathPrefixError { backtrace, .. }
| Kind::SerdeJsonError { backtrace, .. }
| Kind::SerdeYamlError { backtrace, .. } => {
- write!(f, "{}{}", self.0, format_stack(backtrace.as_ref()))
+ write!(f, "{}{}", self.kind, format_stack(backtrace.as_ref()))
}
}
}
diff --git a/src/core/model.rs b/src/core/model.rs
index 5e67c1ee..c58b31e0 100644
--- a/src/core/model.rs
+++ b/src/core/model.rs
@@ -11,6 +11,7 @@ use serde_yaml;
use std::{
collections::{BTreeMap, HashMap},
result,
+ sync::Arc,
};
/// Converts a model instance into a consistent yaml.
///
@@ -59,14 +60,15 @@ where
serializer.serialize_str(&pod.hash)
}
-pub(crate) fn deserialize_pod<'de, D>(deserializer: D) -> result::Result<Pod, D::Error>
+pub(crate) fn deserialize_pod<'de, D>(deserializer: D) -> result::Result<Arc<Pod>, D::Error>
where
D: Deserializer<'de>,
{
Ok(Pod {
hash: String::deserialize(deserializer)?,
..Pod::default()
- })
+ }
+ .into())
}
pub(crate) fn serialize_pod_job(
@@ -79,12 +81,13 @@ where
serializer.serialize_str(&pod_job.hash)
}
-pub(crate) fn deserialize_pod_job<'de, D>(deserializer: D) -> result::Result<PodJob, D::Error>
+pub(crate) fn deserialize_pod_job<'de, D>(deserializer: D) -> result::Result<Arc<PodJob>, D::Error>
where
D: Deserializer<'de>,
{
Ok(PodJob {
hash: String::deserialize(deserializer)?,
..PodJob::default()
- })
+ }
+ .into())
}
diff --git a/src/core/orchestrator/docker.rs b/src/core/orchestrator/docker.rs
index 7a7e813b..64e533b0 100644
--- a/src/core/orchestrator/docker.rs
+++ b/src/core/orchestrator/docker.rs
@@ -131,7 +131,7 @@ impl LocalDockerOrchestrator {
("org.orcapod.pod.hash".to_owned(), pod_job.pod.hash.clone()),
(
"org.orcapod.pod".to_owned(),
- serde_json::to_string(&pod_job.pod)?,
+ serde_json::to_string(&*pod_job.pod)?,
),
(
"org.orcapod.pod_job.annotation".to_owned(),
diff --git a/src/core/orchestrator/mod.rs b/src/core/orchestrator/mod.rs
index 1c62d47a..64fc75ae 100644
--- a/src/core/orchestrator/mod.rs
+++ b/src/core/orchestrator/mod.rs
@@ -9,7 +9,7 @@ use crate::{
impl PodRun {
pub(crate) fn new(pod_job: &PodJob, assigned_name: String) -> Self {
Self {
- pod_job: pod_job.clone(),
+ pod_job: pod_job.clone().into(),
orchestrator_source: get_type_name::(),
assigned_name,
}
diff --git a/src/core/store/filestore.rs b/src/core/store/filestore.rs
index 0548e22e..9f16ac64 100644
--- a/src/core/store/filestore.rs
+++ b/src/core/store/filestore.rs
@@ -1,9 +1,13 @@
use crate::{
- core::{model::to_yaml, util::get_type_name},
+ core::{
+ model::to_yaml,
+ store::MODEL_NAMESPACE,
+ util::{get_type_name, parse_debug_name},
+ },
uniffi::{
error::{Result, selector},
model::Annotation,
- store::{ModelID, ModelInfo, Store as _, filestore::LocalFileStore},
+ store::{ModelID, ModelInfo, filestore::LocalFileStore},
},
};
use colored::Colorize as _;
@@ -14,7 +18,7 @@ use serde::{Serialize, de::DeserializeOwned};
use serde_yaml;
use snafu::OptionExt as _;
use std::{
- fs,
+ fmt, fs,
path::{Path, PathBuf},
sync::LazyLock,
};
@@ -51,12 +55,17 @@ impl LocalFileStore {
PathBuf::from(format!("annotation/{name}-{version}.yaml"))
}
/// Build the storage path with the model directory (`hash`) and a file's relative path.
- pub fn make_path<T>(&self, hash: &str, relpath: impl AsRef<Path>) -> PathBuf {
+ pub fn make_path<T: fmt::Debug>(
+ &self,
+ model: &T,
+ hash: &str,
+ relpath: impl AsRef<Path>,
+ ) -> PathBuf {
PathBuf::from(format!(
"{}/{}/{}/{}",
self.directory.to_string_lossy(),
- Self::MODEL_NAMESPACE,
- get_type_name::().to_snake_case(),
+ MODEL_NAMESPACE,
+ parse_debug_name(model).to_snake_case(),
hash
))
.join(relpath)
@@ -81,13 +90,20 @@ impl LocalFileStore {
/// # Errors
///
/// Will return error if unable to find.
- pub(crate) fn lookup_hash(&self, name: &str, version: &str) -> Result {
- let model_info = Self::find_model_metadata(
- &self.make_path::("*", Self::make_annotation_relpath(name, version)),
- )?
+ pub(crate) fn lookup_hash(
+ &self,
+ model: &T,
+ name: &str,
+ version: &str,
+ ) -> Result<String> {
+ let model_info = Self::find_model_metadata(&self.make_path(
+ model,
+ "*",
+ Self::make_annotation_relpath(name, version),
+ ))?
.next()
.context(selector::NoAnnotationFound {
- class: get_type_name::().to_snake_case(),
+ class: parse_debug_name(model).to_snake_case(),
name: name.to_owned(),
version: version.to_owned(),
})?;
@@ -106,13 +122,13 @@ impl LocalFileStore {
/// # Errors
///
/// Will return `Err` if there is an issue storing the model.
- pub(crate) fn save_model(
+ pub(crate) fn save_model(
&self,
model: &T,
hash: &str,
annotation: Option<&Annotation>,
) -> Result<()> {
- let model_type = get_type_name::();
+ let class = get_type_name::<T>().to_snake_case();
// Save annotation if defined and doesn't collide globally i.e. model, name, version
if let Some(provided_annotation) = annotation {
let relpath = &Self::make_annotation_relpath(
@@ -120,7 +136,7 @@ impl LocalFileStore {
&provided_annotation.version,
);
if let Some((found_hash, found_name, found_version)) =
- Self::find_model_metadata(&self.make_path::("*", relpath))?
+ Self::find_model_metadata(&self.make_path(model, "*", relpath))?
.next()
.and_then(|model_info| {
Some((model_info.hash, model_info.name?, model_info.version?))
@@ -130,7 +146,7 @@ impl LocalFileStore {
"{}",
format!(
"Skip saving {} annotation since `{}`, `{}`, `{}` exists.",
- model_type.bright_cyan(),
+ class.bright_cyan(),
found_hash.bright_cyan(),
found_name.bright_cyan(),
found_version.bright_cyan(),
@@ -139,19 +155,19 @@ impl LocalFileStore {
);
} else {
Self::save_file(
- self.make_path::(hash, relpath),
+ self.make_path(model, hash, relpath),
serde_yaml::to_string(provided_annotation)?,
)?;
}
}
// Save model specification and skip if it already exist e.g. on new annotations
- let spec_file = &self.make_path::(hash, Self::SPEC_RELPATH);
+ let spec_file = &self.make_path(model, hash, Self::SPEC_RELPATH);
if spec_file.exists() {
println!(
"{}",
format!(
"Skip saving {} model since `{}` exists.",
- model_type.bright_cyan(),
+ class.bright_cyan(),
hash.bright_cyan(),
)
.yellow(),
@@ -167,27 +183,33 @@ impl LocalFileStore {
///
/// Will return `Err` if there is an issue loading the model from the store using `name` and
/// `version`.
- pub(crate) fn load_model(
+ pub(crate) fn load_model(
&self,
model_id: &ModelID,
) -> Result<(T, Option<Annotation>, String)> {
match model_id {
ModelID::Hash(hash) => Ok((
- serde_yaml::from_str(&fs::read_to_string(
- self.make_path::(hash, Self::SPEC_RELPATH),
- )?)?,
+ serde_yaml::from_str(&fs::read_to_string(self.make_path(
+ &T::default(),
+ hash,
+ Self::SPEC_RELPATH,
+ ))?)?,
None,
hash.to_owned(),
)),
ModelID::Annotation(name, version) => {
- let hash = self.lookup_hash::(name, version)?;
+ let hash = self.lookup_hash(&T::default(), name, version)?;
Ok((
- serde_yaml::from_str(&fs::read_to_string(
- self.make_path::(&hash, Self::SPEC_RELPATH),
- )?)?,
- serde_yaml::from_str(&fs::read_to_string(
- self.make_path::(&hash, &Self::make_annotation_relpath(name, version)),
- )?)?,
+ serde_yaml::from_str(&fs::read_to_string(self.make_path(
+ &T::default(),
+ &hash,
+ Self::SPEC_RELPATH,
+ ))?)?,
+ serde_yaml::from_str(&fs::read_to_string(self.make_path(
+ &T::default(),
+ &hash,
+ Self::make_annotation_relpath(name, version),
+ ))?)?,
hash,
))
}
@@ -198,8 +220,8 @@ impl LocalFileStore {
/// # Errors
///
/// Will return `Err` if there is an issue querying metadata from existing models in the store.
- pub(crate) fn list_model(&self) -> Result> {
- Ok(Self::find_model_metadata(&self.make_path::("**", "*"))?.collect())
+ pub(crate) fn list_model(&self) -> Result> {
+ Ok(Self::find_model_metadata(&self.make_path(&T::default(), "**", "*"))?.collect())
}
/// How to explicitly delete any stored model and all associated annotations (does not propagate).
///
@@ -207,13 +229,15 @@ impl LocalFileStore {
///
/// Will return `Err` if there is an issue deleting a model from the store using `name` and
/// `version`.
- pub(crate) fn delete_model(&self, model_id: &ModelID) -> Result<()> {
+ pub(crate) fn delete_model(&self, model_id: &ModelID) -> Result<()> {
// assumes propagate = false
let hash = match model_id {
ModelID::Hash(hash) => hash,
- ModelID::Annotation(name, version) => &self.lookup_hash::(name, version)?,
+ ModelID::Annotation(name, version) => {
+ &self.lookup_hash(&T::default(), name, version)?
+ }
};
- let spec_dir = self.make_path::(hash, "");
+ let spec_dir = self.make_path(&T::default(), hash, "");
fs::remove_dir_all(spec_dir)?;
Ok(())
diff --git a/src/core/store/mod.rs b/src/core/store/mod.rs
index fe5e8e56..544190f5 100644
--- a/src/core/store/mod.rs
+++ b/src/core/store/mod.rs
@@ -1 +1,4 @@
+/// Namespace where models will be stored.
+const MODEL_NAMESPACE: &str = "orcapod_model";
+
pub mod filestore;
diff --git a/src/core/util.rs b/src/core/util.rs
index cdf14033..e3a69a65 100644
--- a/src/core/util.rs
+++ b/src/core/util.rs
@@ -1,10 +1,10 @@
use crate::uniffi::error::{Result, selector};
use snafu::OptionExt as _;
-use std::{any::type_name, collections::HashMap};
+use std::{any::type_name, collections::HashMap, fmt};
#[expect(
clippy::unwrap_used,
- reason = "`last()` cannot return `None` since `type_name` always returns `&str`."
+ reason = "Cannot return `None` since `type_name` always returns `&str`."
)]
pub fn get_type_name<T>() -> String {
type_name::<T>()
@@ -14,6 +14,18 @@ pub fn get_type_name() -> String {
.unwrap()
}
+#[expect(
+ clippy::unwrap_used,
+ reason = "Cannot return `None` since debug format always returns `String`."
+)]
+pub fn parse_debug_name<T: fmt::Debug>(instance: &T) -> String {
+ format!("{instance:?}")
+ .split(' ')
+ .map(str::to_owned)
+ .next()
+ .unwrap()
+}
+
pub fn get<'map, T>(map: &'map HashMap<String, T>, key: &str) -> Result<&'map T> {
Ok(map.get(key).context(selector::KeyMissing {
key: key.to_owned(),
diff --git a/src/lib.rs b/src/lib.rs
index e2d52d43..94e8829a 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,7 +1,29 @@
//! Intuitive compute pipeline orchestration with reproducibility, performance, and scalability in
//! mind.
-
+extern crate uniffi as uniffi_external;
+uniffi_external::setup_scaffolding!();
/// Pure Rust source.
pub mod core;
-/// Exposed `CFFI` client based on `UniFFI`.
+/// Exposed CFFI client based on [uniffi](https://crates.io/crates/uniffi).
+///
+/// `uniffi` brings a lot of convenience in creating a CFFI but we also must meet several
+/// requirements. This means there are several design implications to be aware of.
+///
+/// For instance, anything exposed over the CFFI boundary with `uniffi` (i.e. children of this
+/// `mod`) needs to respect several rules (not exhaustive).
+/// 1. No use of Rust generics
+/// 1. No use of `const` values in traits
+/// 1. Primitives must be owned by the client e.g., `String`, `u8`, `bool`, `f32`, etc.
+/// 1. `BTreeMap` isn't supported but `HashMap` is.
+/// 1. `async` functions must use the custom derive attribute from the
+/// [async-trait](https://crates.io/crates/async-trait) crate
+/// 1. Custom types that are to be owned by client:
+/// 1. are passed by value i.e. as a move
+/// 1. cannot export any methods associated with them i.e. can't be invoked on client side
+/// 1. Custom types that are to be owned by Rust:
+/// 1. are passed by reference using `Arc`
+/// 1. can have methods exported i.e. can be invoked on client side
+/// 1. require exporting constructor methods
+/// 1. require exporting getter methods (e.g. for each field on a `struct`) since the underlying
+/// values are owned by Rust
pub mod uniffi;
diff --git a/src/uniffi/error.rs b/src/uniffi/error.rs
index 34ad3d83..3ef9daff 100644
--- a/src/uniffi/error.rs
+++ b/src/uniffi/error.rs
@@ -14,11 +14,13 @@ use std::{
path::{self, PathBuf},
result,
};
+use uniffi;
/// Shorthand for a Result that returns an `OrcaError`.
pub type Result<T> = result::Result<T, OrcaError>;
/// Possible errors you may encounter.
-#[derive(Snafu, Debug)]
+#[derive(Snafu, Debug, uniffi::Error)]
#[snafu(module(selector), visibility(pub(crate)), context(suffix(false)))]
+#[uniffi(flat_error)]
pub(crate) enum Kind {
#[snafu(display(
"Received an empty response when attempting to load the alternate container image file: {path:?}."
@@ -103,16 +105,21 @@ pub(crate) enum Kind {
},
}
/// A stable error API interface.
-#[derive(Snafu)]
-pub struct OrcaError(pub(crate) Kind);
+#[derive(Snafu, uniffi::Object)]
+#[snafu(display("{self:?}"))]
+#[uniffi::export(Display)]
+pub struct OrcaError {
+ pub(crate) kind: Kind,
+}
+#[uniffi::export]
impl OrcaError {
/// Returns `true` if the error was caused by an invalid model annotation.
pub const fn is_invalid_annotation(&self) -> bool {
- matches!(self.0, Kind::NoAnnotationFound { .. })
+ matches!(self.kind, Kind::NoAnnotationFound { .. })
}
/// Returns `true` if the error was caused by querying a purged pod run.
pub const fn is_purged_pod_run(&self) -> bool {
- matches!(self.0, Kind::NoMatchingPodRun { .. })
+ matches!(self.kind, Kind::NoMatchingPodRun { .. })
}
}
diff --git a/src/uniffi/model.rs b/src/uniffi/model.rs
index 9337cf66..b2aeb24b 100644
--- a/src/uniffi/model.rs
+++ b/src/uniffi/model.rs
@@ -8,13 +8,32 @@ use crate::{
},
uniffi::{error::Result, orchestrator::Status},
};
+use derive_more::Display;
+use getset::CloneGetters;
use serde::{Deserialize, Serialize};
-use std::{collections::HashMap, path::PathBuf};
+use std::{collections::HashMap, path::PathBuf, sync::Arc};
+use uniffi;
+
+/// Available models.
+#[derive(uniffi::Enum, Debug)]
+pub enum ModelType {
+ /// A reusable, containerized computational unit.
+ Pod,
+ /// A compute job that specifies resource requests and input/output targets.
+ PodJob,
+ /// Result from a compute job run.
+ PodResult,
+}
// --- core model structs ---
/// A reusable, containerized computational unit.
-#[derive(Serialize, Deserialize, Debug, PartialEq, Default, Clone)]
+#[derive(
+ uniffi::Object, Serialize, Deserialize, Debug, PartialEq, Default, Clone, Display, CloneGetters,
+)]
+#[getset(get_clone, impl_attrs = "#[uniffi::export]")]
+#[display("{self:#?}")]
+#[uniffi::export(Display)]
pub struct Pod {
/// Metadata that doesn't affect reproducibility.
#[serde(skip)]
@@ -44,12 +63,14 @@ pub struct Pod {
pub required_gpu: Option<GPURequirement>,
}
+#[uniffi::export]
impl Pod {
/// Construct a new pod instance.
///
/// # Errors
///
/// Will return `Err` if there is an issue initializing a `Pod` instance.
+ #[uniffi::constructor]
pub fn new(
annotation: Option,
image: String,
@@ -83,7 +104,12 @@ impl Pod {
}
/// A compute job that specifies resource requests and input/output targets.
-#[derive(Serialize, Deserialize, Debug, PartialEq, Clone, Default)]
+#[derive(
+ uniffi::Object, Serialize, Deserialize, Debug, PartialEq, Clone, Default, Display, CloneGetters,
+)]
+#[getset(get_clone, impl_attrs = "#[uniffi::export]")]
+#[display("{self:#?}")]
+#[uniffi::export(Display)]
pub struct PodJob {
/// Metadata that doesn't affect reproducibility.
#[serde(skip)]
@@ -93,7 +119,7 @@ pub struct PodJob {
pub hash: String,
/// A pod to base the pod job on.
#[serde(serialize_with = "serialize_pod", deserialize_with = "deserialize_pod")]
- pub pod: Pod,
+ pub pod: Arc<Pod>,
/// Attached, external input streams.
#[serde(serialize_with = "serialize_hashmap")]
pub input_stream: HashMap,
@@ -108,15 +134,17 @@ pub struct PodJob {
pub env_vars: Option<HashMap<String, String>>,
}
+#[uniffi::export]
impl PodJob {
/// Construct a new pod job instance.
///
/// # Errors
///
/// Will return `Err` if there is an issue initializing a `PodJob` instance.
+ #[uniffi::constructor]
pub fn new(
annotation: Option,
- pod: Pod,
+ pod: Arc<Pod>,
mut input_stream: HashMap,
output_dir: OrcaPath,
cpu_limit: f32,
@@ -160,7 +188,7 @@ impl PodJob {
}
/// Result from a compute job run.
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq)]
+#[derive(uniffi::Record, Serialize, Deserialize, Debug, Clone, PartialEq, Default)]
pub struct PodResult {
/// Metadata that doesn't affect reproducibility.
#[serde(skip)]
@@ -173,7 +201,7 @@ pub struct PodResult {
serialize_with = "serialize_pod_job",
deserialize_with = "deserialize_pod_job"
)]
- pub pod_job: PodJob,
+ pub pod_job: Arc<PodJob>,
/// Name given by orchestrator.
pub assigned_name: String,
/// Status of compute run when terminated.
@@ -192,7 +220,7 @@ impl PodResult {
/// Will return `Err` if there is an issue initializing a `PodResult` instance.
pub fn new(
annotation: Option,
- pod_job: PodJob,
+ pod_job: Arc<PodJob>,
assigned_name: String,
status: Status,
created: u64,
@@ -217,7 +245,7 @@ impl PodResult {
// --- util types ---
/// Standard metadata structure for all model instances.
-#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
+#[derive(uniffi::Record, Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
pub struct Annotation {
/// A unique name.
pub name: String,
@@ -227,7 +255,7 @@ pub struct Annotation {
pub description: String,
}
/// Specification for GPU requirements in computation.
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
+#[derive(uniffi::Record, Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct GPURequirement {
/// GPU model specification.
pub model: GPUModel,
@@ -237,7 +265,7 @@ pub struct GPURequirement {
pub count: u16,
}
/// GPU model specification.
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
+#[derive(uniffi::Enum, Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub enum GPUModel {
/// NVIDIA-manufactured card where `String` is the specific model e.g. ???
NVIDIA(String),
@@ -246,7 +274,7 @@ pub enum GPUModel {
}
/// Streams are named and represent an abstraction for the file(s) that represent some particular
/// data.
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
+#[derive(uniffi::Record, Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
pub struct StreamInfo {
/// Path to stream file or directory.
pub path: PathBuf,
@@ -254,7 +282,7 @@ pub struct StreamInfo {
pub match_pattern: String,
}
/// Input options.
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
+#[derive(uniffi::Enum, Serialize, Deserialize, Debug, Clone, PartialEq, Eq)]
#[serde(untagged)]
pub enum Input {
/// A single BLOB.
@@ -263,7 +291,7 @@ pub enum Input {
Collection(Vec<Blob>),
}
/// Location of BLOB data.
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)]
+#[derive(uniffi::Record, Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)]
pub struct OrcaPath {
/// Namespace alias.
pub namespace: String,
@@ -272,7 +300,7 @@ pub struct OrcaPath {
}
/// BLOB with metadata.
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)]
+#[derive(uniffi::Record, Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)]
pub struct Blob {
/// BLOB available options.
pub kind: BlobKind,
@@ -282,7 +310,7 @@ pub struct Blob {
pub checksum: String,
}
/// File or directory options for BLOBs.
-#[derive(Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)]
+#[derive(uniffi::Enum, Serialize, Deserialize, Debug, Clone, PartialEq, Eq, Default)]
pub enum BlobKind {
/// A single file.
#[default]
@@ -290,3 +318,11 @@ pub enum BlobKind {
/// A single directory.
Directory,
}
+
+// --- utils ----
+
+uniffi::custom_type!(PathBuf, String, {
+ remote,
+ try_lift: |val| Ok(PathBuf::from(&val)),
+ lower: |obj| obj.display().to_string(),
+});
diff --git a/src/uniffi/orchestrator/docker.rs b/src/uniffi/orchestrator/docker.rs
index 5f997a2b..7a2e75e1 100644
--- a/src/uniffi/orchestrator/docker.rs
+++ b/src/uniffi/orchestrator/docker.rs
@@ -6,28 +6,35 @@ use crate::{
orchestrator::{ImageKind, Orchestrator, PodRun, RunInfo},
},
};
+use async_trait;
use bollard::{
Docker,
container::{RemoveContainerOptions, StartContainerOptions, WaitContainerOptions},
image::{CreateImageOptions, ImportImageOptions},
};
+use derive_more::Display;
use futures_util::stream::{StreamExt as _, TryStreamExt as _};
use snafu::{OptionExt as _, futures::TryFutureExt as _};
-use std::{collections::HashMap, path::PathBuf};
+use std::{collections::HashMap, path::PathBuf, sync::Arc};
use tokio::{fs::File, runtime::Runtime};
use tokio_util::{
bytes::{Bytes, BytesMut},
codec::{BytesCodec, FramedRead},
};
+use uniffi;
/// Support for an orchestration engine using a local docker installation.
-#[derive(Debug)]
+#[derive(uniffi::Object, Debug, Display)]
+#[display("{self:#?}")]
+#[uniffi::export(Display)]
pub struct LocalDockerOrchestrator {
/// API to interact with Docker daemon.
pub api: Docker,
async_driver: Runtime,
}
+#[uniffi::export]
+#[async_trait::async_trait]
impl Orchestrator for LocalDockerOrchestrator {
fn start_with_altimage_blocking(
&self,
@@ -161,7 +168,7 @@ impl Orchestrator for LocalDockerOrchestrator {
pod_job
.hash
.clone_from(get(&run_info.labels, "org.orcapod.pod_job.hash")?);
- pod_job.pod = pod;
+ pod_job.pod = pod.into();
Ok(PodRun::new::(&pod_job, assigned_name))
})
.collect()
@@ -204,7 +211,7 @@ impl Orchestrator for LocalDockerOrchestrator {
let result_info = self.get_info(pod_run).await?;
PodResult::new(
None,
- pod_run.pod_job.clone(),
+ Arc::clone(&pod_run.pod_job),
pod_run.assigned_name.clone(),
result_info.status,
result_info.created,
@@ -217,6 +224,7 @@ impl Orchestrator for LocalDockerOrchestrator {
}
}
+#[uniffi::export]
impl LocalDockerOrchestrator {
/// How to create a local docker orchestrator with an absolute path on docker host where binds
/// will be mounted from.
@@ -224,6 +232,7 @@ impl LocalDockerOrchestrator {
/// # Errors
///
/// Will return `Err` if there is an issue creating a local docker orchestrator.
+ #[uniffi::constructor]
pub fn new() -> Result {
Ok(Self {
api: Docker::connect_with_local_defaults()?,
diff --git a/src/uniffi/orchestrator/mod.rs b/src/uniffi/orchestrator/mod.rs
index e247c0fa..82431906 100644
--- a/src/uniffi/orchestrator/mod.rs
+++ b/src/uniffi/orchestrator/mod.rs
@@ -3,8 +3,10 @@ use crate::uniffi::{
model::{OrcaPath, PodJob, PodResult},
};
use serde::{Deserialize, Serialize};
-use std::{collections::HashMap, future::Future, path::PathBuf};
+use std::{collections::HashMap, path::PathBuf, sync::Arc};
+use uniffi;
/// Options for sourcing compute environment images.
+#[derive(uniffi::Enum)]
pub enum ImageKind {
/// A published compute environment image in a container registry. Argument formatted as
/// `{server.com/}{name}:{tag}`. Server is optional e.g. (`alpine:latest`).
@@ -13,7 +15,7 @@ pub enum ImageKind {
Tarball(OrcaPath),
}
/// Status of a particular compute run.
-#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone)]
+#[derive(uniffi::Enum, Serialize, Deserialize, Debug, PartialEq, Eq, Clone, Default)]
pub enum Status {
/// Run is ongoing.
Running,
@@ -21,9 +23,12 @@ pub enum Status {
Completed,
/// Run failed with the provided error code.
Failed(i16),
+ /// No status set.
+ #[default]
+ Unset,
}
/// Run metadata
-#[derive(Debug)]
+#[derive(uniffi::Record, Debug)]
pub struct RunInfo {
/// Environment utilized.
pub image: String,
@@ -47,10 +52,10 @@ pub struct RunInfo {
pub memory_limit: u64,
}
/// Current computation managed by orchestrator.
-#[derive(Debug)]
+#[derive(uniffi::Record, Debug)]
pub struct PodRun {
/// Original compute request.
- pub pod_job: PodJob,
+ pub pod_job: Arc,
/// Name of orchestrator that created the run.
pub orchestrator_source: String,
/// Name given by orchestrator.
@@ -58,7 +63,9 @@ pub struct PodRun {
}
/// API for standard behavior of any container orchestration engine supported.
-pub trait Orchestrator {
+#[uniffi::export]
+#[async_trait::async_trait]
+pub trait Orchestrator: Send + Sync {
/// How to synchronously start containers with an alternate image.
///
/// # Errors
@@ -109,46 +116,46 @@ pub trait Orchestrator {
/// # Errors
///
/// Will return `Err` if there is an issue starting the container.
- fn start_with_altimage(
+ async fn start_with_altimage(
&self,
namespace_lookup: &HashMap,
pod_job: &PodJob,
image: &ImageKind,
- ) -> impl Future