Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .devcontainer/Dockerfile
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
# DevContainer image
FROM rust:1.85-slim
FROM rust:1.86-slim
RUN \
adduser --system --disabled-password --shell /bin/bash --home /home/vscode vscode && \
# install docker
Expand Down
2 changes: 1 addition & 1 deletion .github/workflows/tests.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ jobs:
- name: Install Rust + components
uses: actions-rust-lang/setup-rust-toolchain@v1
with:
toolchain: 1.85
toolchain: 1.86
components: rustfmt,clippy
- name: Install Rust code coverage
uses: taiki-e/install-action@cargo-llvm-cov
Expand Down
1 change: 1 addition & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@ missing_asserts_for_indexing = { level = "allow", priority = 127 } # missing
missing_docs_in_private_items = { level = "allow", priority = 127 } # missing docs on private ok
missing_inline_in_public_items = { level = "allow", priority = 127 } # let rust compiler determine best inline logic
missing_trait_methods = { level = "allow", priority = 127 } # allow in favor of rustc `implement the missing item`
multiple_inherent_impl = { level = "allow", priority = 127 } # required in best practice to limit exposure over UniFFI
must_use_candidate = { level = "allow", priority = 127 } # omitting #[must_use] ok
mod_module_files = { level = "allow", priority = 127 } # mod directories ok
non_ascii_literal = { level = "allow", priority = 127 } # non-ascii char in string literal ok
Expand Down
2 changes: 2 additions & 0 deletions cspell.json
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,8 @@
"nanocore",
"numpy",
"graphviz",
"uniffi",
"cffi"
],
"ignoreWords": [
"relpath",
Expand Down
12 changes: 7 additions & 5 deletions src/crypto.rs → src/core/crypto.rs
Original file line number Diff line number Diff line change
@@ -1,7 +1,9 @@
use crate::{
error::Result,
model::{Blob, BlobKind},
util::get,
core::util::get,
uniffi::{
error::Result,
model::{Blob, BlobKind},
},
};
use serde_yaml;
use sha2::{Digest as _, Sha256};
Expand All @@ -21,7 +23,7 @@ use std::{
clippy::indexing_slicing,
reason = "Reading less than 0 is impossible."
)]
pub fn hash_stream(stream: &mut impl Read) -> Result<String> {
pub(crate) fn hash_stream(stream: &mut impl Read) -> Result<String> {
const BUFFER_SIZE: usize = 8 << 10; // 8KB chunks to match with page size typically found
let mut hash = Sha256::new();

Expand Down Expand Up @@ -79,7 +81,7 @@ pub fn hash_dir(dirpath: impl AsRef<Path>) -> Result<String> {
/// # Errors
///
/// Will return error if hashing fails on file or directory.
pub fn hash_blob(
pub(crate) fn hash_blob(
namespace_lookup: &HashMap<String, PathBuf, RandomState>,
blob: Blob,
) -> Result<Blob> {
Expand Down
62 changes: 62 additions & 0 deletions src/core/error.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,62 @@
use crate::uniffi::error::{Kind, OrcaError};
use bollard::errors::Error as BollardError;
use glob;
use serde_json;
use serde_yaml;
use std::{
fmt::{self, Display, Formatter},
io, path,
};

impl Display for OrcaError {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.kind)
}
}
impl From<BollardError> for OrcaError {
fn from(error: BollardError) -> Self {
Self {
kind: Kind::BollardError(error),
}
}
}
impl From<glob::PatternError> for OrcaError {
fn from(error: glob::PatternError) -> Self {
Self {
kind: Kind::GlobPatternError(error),
}
}
}
impl From<io::Error> for OrcaError {
fn from(error: io::Error) -> Self {
Self {
kind: Kind::IoError(error),
}
}
}
impl From<path::StripPrefixError> for OrcaError {
fn from(error: path::StripPrefixError) -> Self {
Self {
kind: Kind::PathPrefixError(error),
}
}
}
impl From<serde_json::Error> for OrcaError {
fn from(error: serde_json::Error) -> Self {
Self {
kind: Kind::SerdeJsonError(error),
}
}
}
impl From<serde_yaml::Error> for OrcaError {
fn from(error: serde_yaml::Error) -> Self {
Self {
kind: Kind::SerdeYamlError(error),
}
}
}
impl From<Kind> for OrcaError {
fn from(kind: Kind) -> Self {
Self { kind }
}
}
8 changes: 8 additions & 0 deletions src/core/mod.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,8 @@
/// State change verification via cryptographic utilities.
pub mod crypto;
/// Conversions from external error types (`bollard`, `glob`, IO, `serde_json`,
/// `serde_yaml`) into the crate's `OrcaError`.
pub(crate) mod error;
/// Components of the data model.
pub mod model;
// NOTE(review): `orchestrator` and `store` contents are not visible in this
// change — descriptions inferred from the module names; confirm.
/// Pod/job orchestration internals (crate-private).
pub(crate) mod orchestrator;
/// Persistence internals for model objects (crate-private).
pub(crate) mod store;
/// Shared crate-private helpers (e.g. `get`, `get_type_name`).
pub(crate) mod util;
90 changes: 90 additions & 0 deletions src/core/model.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,90 @@
use crate::{
core::util::get_type_name,
uniffi::{
error::Result,
model::{Pod, PodJob},
},
};
use heck::ToSnakeCase as _;
use serde::{Deserialize as _, Deserializer, Serialize, Serializer};
use serde_yaml;
use std::{
collections::{BTreeMap, HashMap},
result,
};
/// Converts a model instance into a consistent yaml.
///
/// The concrete type name of `T` is emitted in snake_case as a leading
/// `class:` line, followed by the serde-generated YAML body.
///
/// # Errors
///
/// Will return `Err` if there is an issue converting an `instance` into YAML (w/o annotation).
pub fn to_yaml<T: Serialize>(instance: &T) -> Result<String> {
    let header = format!("class: {}\n", get_type_name::<T>().to_snake_case());
    let body = serde_yaml::to_string(instance)?;

    // Prepending the header is equivalent to `insert_str(0, …)` on the body.
    Ok(header + &body)
}

/// Serializes a `HashMap` with entries ordered by key, so the output is
/// deterministic regardless of hash iteration order.
pub(crate) fn serialize_hashmap<S, K: Ord + Serialize, V: Serialize>(
    map: &HashMap<K, V>,
    serializer: S,
) -> result::Result<S::Ok, S::Error>
where
    S: Serializer,
{
    // Re-keying into a BTreeMap sorts by `K: Ord` before serialization.
    map.iter().collect::<BTreeMap<_, _>>().serialize(serializer)
}

#[expect(clippy::ref_option, reason = "Serde requires this signature.")]
/// Serializes an optional `HashMap` with entries ordered by key; `None`
/// serializes as the format's null value.
pub(crate) fn serialize_hashmap_option<S, K: Ord + Serialize, V: Serialize>(
    map_option: &Option<HashMap<K, V>>,
    serializer: S,
) -> result::Result<S::Ok, S::Error>
where
    S: Serializer,
{
    match map_option {
        // Sort entries through a BTreeMap for deterministic output.
        Some(map) => Some(map.iter().collect::<BTreeMap<_, _>>()).serialize(serializer),
        None => None::<BTreeMap<&K, &V>>.serialize(serializer),
    }
}

/// Serializes a `Pod` as just its content hash string.
pub(crate) fn serialize_pod<S>(pod: &Pod, serializer: S) -> result::Result<S::Ok, S::Error>
where
    S: Serializer,
{
    // `String`'s Serialize impl emits via `serialize_str`.
    pod.hash.serialize(serializer)
}

/// Deserializes a `Pod` from its hash string; all other fields take their
/// `Default` values (a stub to be resolved against a store elsewhere).
pub(crate) fn deserialize_pod<'de, D>(deserializer: D) -> result::Result<Pod, D::Error>
where
    D: Deserializer<'de>,
{
    String::deserialize(deserializer).map(|hash| Pod {
        hash,
        ..Pod::default()
    })
}

/// Serializes a `PodJob` as just its content hash string.
pub(crate) fn serialize_pod_job<S>(
    pod_job: &PodJob,
    serializer: S,
) -> result::Result<S::Ok, S::Error>
where
    S: Serializer,
{
    // `String`'s Serialize impl emits via `serialize_str`.
    pod_job.hash.serialize(serializer)
}

/// Deserializes a `PodJob` from its hash string; all other fields take their
/// `Default` values (a stub to be resolved against a store elsewhere).
pub(crate) fn deserialize_pod_job<'de, D>(deserializer: D) -> result::Result<PodJob, D::Error>
where
    D: Deserializer<'de>,
{
    String::deserialize(deserializer).map(|hash| PodJob {
        hash,
        ..PodJob::default()
    })
}
Loading
Loading