diff --git a/.circleci/config.yml b/.circleci/config.yml index 95dceb694..b2ef658ca 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -89,6 +89,7 @@ commands: shuttle-persist = { path = "$PWD/resources/persist" } shuttle-shared-db = { path = "$PWD/resources/shared-db" } shuttle-secrets = { path = "$PWD/resources/secrets" } + shuttle-static-folder = { path = "$PWD/resources/static-folder" } EOF install-rust: steps: @@ -260,6 +261,7 @@ workflows: - resources/persist - resources/secrets - resources/shared-db + - resources/static-folder - service-test: requires: - workspace-clippy diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 5d38beb59..145a72150 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -49,6 +49,7 @@ shuttle-aws-rds = { path = "[base]/shuttle/resources/aws-rds" } shuttle-persist = { path = "[base]/shuttle/resources/persist" } shuttle-shared-db = { path = "[base]/shuttle/resources/shared-db" } shuttle-secrets = { path = "[base]/shuttle/resources/secrets" } +shuttle-static-folder = { path = "[base]/shuttle/resources/static-folder" } ``` Prime gateway database with an admin user: diff --git a/Cargo.toml b/Cargo.toml index 6ae4b77e3..6a8aeed4b 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,5 +16,6 @@ exclude = [ "resources/aws-rds", "resources/persist", "resources/secrets", - "resources/shared-db" + "resources/shared-db", + "resources/static-folder" ] diff --git a/Makefile b/Makefile index 8ba720726..e04b5791d 100644 --- a/Makefile +++ b/Makefile @@ -188,6 +188,7 @@ publish: publish-resources publish-cargo-shuttle publish-resources: publish-resources/aws-rds \ publish-resources/persist \ publish-resources/shared-db + publish-resources/static-folder publish-cargo-shuttle: publish-resources/secrets cd cargo-shuttle; cargo publish diff --git a/cargo-shuttle/src/factory.rs b/cargo-shuttle/src/factory.rs index a4cf1c74f..fa9b5ff52 100644 --- a/cargo-shuttle/src/factory.rs +++ b/cargo-shuttle/src/factory.rs @@ -23,6 +23,7 @@ use shuttle_service::{database::Type, error::CustomError, Factory, ServiceName}; use std::{ collections::{BTreeMap, HashMap}, io::stdout, + path::PathBuf, time::Duration, }; use tokio::time::sleep; @@ -32,14 +33,20 @@ pub struct LocalFactory { docker: Docker, service_name: ServiceName, secrets: BTreeMap, + working_directory: PathBuf, } impl LocalFactory { - pub fn new(service_name: ServiceName, secrets: BTreeMap) -> Result { + pub fn new( + service_name: ServiceName, + secrets: BTreeMap, + working_directory: PathBuf, + ) -> Result { Ok(Self { docker: Docker::connect_with_local_defaults()?, service_name, secrets, + working_directory, }) } } @@ -176,6 +183,14 @@ impl Factory for LocalFactory { fn get_service_name(&self) -> ServiceName { self.service_name.clone() } + + fn get_build_path(&self) -> Result { + Ok(self.working_directory.clone()) + } + + fn get_storage_path(&self) -> Result { + Ok(self.working_directory.clone()) + } } impl LocalFactory { diff --git a/cargo-shuttle/src/lib.rs b/cargo-shuttle/src/lib.rs index 53838c344..be36fe97d 100644 --- a/cargo-shuttle/src/lib.rs +++ b/cargo-shuttle/src/lib.rs @@ -309,7 +309,11 @@ impl Shuttle { let loader = Loader::from_so_file(so_path)?; - let mut factory = LocalFactory::new(self.ctx.project_name().clone(), secrets)?; + let mut factory = LocalFactory::new( + self.ctx.project_name().clone(), + secrets, + working_directory.to_path_buf(), + )?; let addr = SocketAddr::new(Ipv4Addr::LOCALHOST.into(), run_args.port); trace!("loading project"); diff --git a/deployer/prepare.sh b/deployer/prepare.sh index 
c12581b19..3bbe3dc53 100755 --- a/deployer/prepare.sh +++ b/deployer/prepare.sh @@ -12,7 +12,8 @@ shuttle-service = { path = "/usr/src/shuttle/service" } shuttle-aws-rds = { path = "/usr/src/shuttle/resources/aws-rds" } shuttle-persist = { path = "/usr/src/shuttle/resources/persist" } shuttle-shared-db = { path = "/usr/src/shuttle/resources/shared-db" } -shuttle-secrets = { path = "/usr/src/shuttle/resources/secrets" }' > $CARGO_HOME/config.toml +shuttle-secrets = { path = "/usr/src/shuttle/resources/secrets" } +shuttle-static-folder = { path = "/usr/src/shuttle/resources/static-folder" }' > $CARGO_HOME/config.toml # Prefetch crates.io index cd /usr/src/shuttle/service diff --git a/deployer/src/deployment/deploy_layer.rs b/deployer/src/deployment/deploy_layer.rs index 3d4b6e630..42e0efdf6 100644 --- a/deployer/src/deployment/deploy_layer.rs +++ b/deployer/src/deployment/deploy_layer.rs @@ -362,8 +362,9 @@ mod tests { use crate::{ deployment::{ - deploy_layer::LogType, provisioner_factory, runtime_logger, ActiveDeploymentsGetter, - Built, DeploymentManager, Queued, + deploy_layer::LogType, provisioner_factory, runtime_logger, + storage_manager::StorageManager, ActiveDeploymentsGetter, Built, DeploymentManager, + Queued, }, persistence::{SecretRecorder, State}, }; @@ -460,6 +461,8 @@ mod tests { &self, _project_name: shuttle_common::project::ProjectName, _service_id: Uuid, + _deployment_id: Uuid, + _storage_manager: StorageManager, ) -> Result { Ok(StubProvisionerFactory) } @@ -485,6 +488,14 @@ mod tests { fn get_service_name(&self) -> shuttle_service::ServiceName { panic!("did not expect any deploy_layer test to get the service name") } + + fn get_build_path(&self) -> Result { + panic!("did not expect any deploy_layer test to get the build path") + } + + fn get_storage_path(&self) -> Result { + panic!("did not expect any deploy_layer test to get the storage path") + } } struct StubRuntimeLoggerFactory; diff --git a/deployer/src/deployment/mod.rs b/deployer/src/deployment/mod.rs index 0d2c74071..865d298c2 100644 --- a/deployer/src/deployment/mod.rs +++ b/deployer/src/deployment/mod.rs @@ -3,6 +3,7 @@ pub mod provisioner_factory; mod queue; mod run; pub mod runtime_logger; +mod storage_manager; use std::path::PathBuf; @@ -15,7 +16,7 @@ use crate::persistence::{SecretRecorder, State}; use tokio::sync::{broadcast, mpsc}; use uuid::Uuid; -use self::deploy_layer::LogRecorder; +use self::{deploy_layer::LogRecorder, storage_manager::StorageManager}; const QUEUE_BUFFER_SIZE: usize = 100; const RUN_BUFFER_SIZE: usize = 100; @@ -48,7 +49,7 @@ impl DeploymentManager { build_log_recorder, secret_recorder, active_deployment_getter, - artifacts_path, + StorageManager::new(artifacts_path), ), kill_send, } @@ -109,7 +110,7 @@ impl Pipeline { build_log_recorder: impl LogRecorder, secret_recorder: impl SecretRecorder, active_deployment_getter: impl ActiveDeploymentsGetter, - artifacts_path: PathBuf, + storage_manager: StorageManager, ) -> Pipeline { let (queue_send, queue_recv) = mpsc::channel(QUEUE_BUFFER_SIZE); let (run_send, run_recv) = mpsc::channel(RUN_BUFFER_SIZE); @@ -121,7 +122,7 @@ impl Pipeline { run_send_clone, build_log_recorder, secret_recorder, - artifacts_path.clone(), + storage_manager.clone(), )); tokio::spawn(run::task( run_recv, @@ -129,7 +130,7 @@ impl Pipeline { abstract_factory, runtime_logger_factory, active_deployment_getter, - artifacts_path, + storage_manager, )); Pipeline { diff --git a/deployer/src/deployment/provisioner_factory.rs b/deployer/src/deployment/provisioner_factory.rs 
index 9704d86ba..d888892ff 100644 --- a/deployer/src/deployment/provisioner_factory.rs +++ b/deployer/src/deployment/provisioner_factory.rs @@ -1,4 +1,4 @@ -use std::collections::BTreeMap; +use std::{collections::BTreeMap, path::PathBuf}; use async_trait::async_trait; use shuttle_common::{database, DatabaseReadyInfo}; @@ -16,6 +16,8 @@ use uuid::Uuid; use crate::persistence::{Resource, ResourceRecorder, ResourceType, SecretGetter}; +use super::storage_manager::StorageManager; + /// Trait to make it easy to get a factory (service locator) for each service being started #[async_trait] pub trait AbstractFactory: Send + Sync + 'static { @@ -27,6 +29,8 @@ pub trait AbstractFactory: Send + Sync + 'static { &self, service_name: ServiceName, service_id: Uuid, + deployment_id: Uuid, + storage_manager: StorageManager, ) -> Result; } @@ -47,6 +51,8 @@ impl AbstractFactory for AbstractProvision &self, service_name: ServiceName, service_id: Uuid, + deployment_id: Uuid, + storage_manager: StorageManager, ) -> Result { let provisioner_client = ProvisionerClient::connect(self.provisioner_uri.clone()).await?; @@ -54,6 +60,8 @@ impl AbstractFactory for AbstractProvision provisioner_client, service_name, service_id, + deployment_id, + storage_manager, self.resource_recorder.clone(), self.secret_getter.clone(), )) @@ -80,6 +88,8 @@ pub enum ProvisionerError { pub struct ProvisionerFactory { service_name: ServiceName, service_id: Uuid, + deployment_id: Uuid, + storage_manager: StorageManager, provisioner_client: ProvisionerClient, info: Option, resource_recorder: R, @@ -92,6 +102,8 @@ impl ProvisionerFactory { provisioner_client: ProvisionerClient, service_name: ServiceName, service_id: Uuid, + deployment_id: Uuid, + storage_manager: StorageManager, resource_recorder: R, secret_getter: S, ) -> Self { @@ -99,6 +111,8 @@ impl ProvisionerFactory { provisioner_client, service_name, service_id, + deployment_id, + storage_manager, info: None, resource_recorder, secret_getter, @@ -179,4 +193,16 @@ impl Factory for ProvisionerFactory fn get_service_name(&self) -> ServiceName { self.service_name.clone() } + + fn get_build_path(&self) -> Result { + self.storage_manager + .service_build_path(self.service_name.as_str()) + .map_err(Into::into) + } + + fn get_storage_path(&self) -> Result { + self.storage_manager + .deployment_storage_path(self.service_name.as_str(), &self.deployment_id) + .map_err(Into::into) + } } diff --git a/deployer/src/deployment/queue.rs b/deployer/src/deployment/queue.rs index 298e36eb8..a76fe83c3 100644 --- a/deployer/src/deployment/queue.rs +++ b/deployer/src/deployment/queue.rs @@ -1,4 +1,5 @@ use super::deploy_layer::{Log, LogRecorder, LogType}; +use super::storage_manager::StorageManager; use super::{Built, QueueReceiver, RunSender, State}; use crate::error::{Error, Result, TestError}; use crate::persistence::{LogLevel, SecretRecorder}; @@ -31,23 +32,10 @@ pub async fn task( run_send: RunSender, log_recorder: impl LogRecorder, secret_recorder: impl SecretRecorder, - artifacts_path: PathBuf, + storage_manager: StorageManager, ) { info!("Queue task started"); - // Path of the directory that contains extracted service Cargo projects. - let builds_path = artifacts_path.join("shuttle-builds"); - - // The directory in which compiled '.so' files are stored. 
- let libs_path = artifacts_path.join("shuttle-libs"); - - fs::create_dir_all(&builds_path) - .await - .expect("could not create builds directory"); - fs::create_dir_all(&libs_path) - .await - .expect("could not create libs directory"); - while let Some(queued) = recv.recv().await { let id = queued.id; @@ -56,8 +44,7 @@ pub async fn task( let run_send_cloned = run_send.clone(); let log_recorder = log_recorder.clone(); let secret_recorder = secret_recorder.clone(); - let builds_path = builds_path.clone(); - let libs_path = libs_path.clone(); + let storage_manager = storage_manager.clone(); tokio::spawn(async move { let parent_cx = global::get_text_map_propagator(|propagator| { @@ -68,7 +55,7 @@ pub async fn task( async move { match queued - .handle(builds_path, libs_path, log_recorder, secret_recorder) + .handle(storage_manager, log_recorder, secret_recorder) .await { Ok(built) => promote_to_run(built, run_send_cloned).await, @@ -112,17 +99,16 @@ pub struct Queued { } impl Queued { - #[instrument(skip(self, builds_path, libs_path, log_recorder, secret_recorder), fields(id = %self.id, state = %State::Building))] + #[instrument(skip(self, storage_manager, log_recorder, secret_recorder), fields(id = %self.id, state = %State::Building))] async fn handle( self, - builds_path: PathBuf, - libs_path: PathBuf, + storage_manager: StorageManager, log_recorder: impl LogRecorder, secret_recorder: impl SecretRecorder, ) -> Result { info!("Extracting received data"); - let project_path = builds_path.join(&self.service_name); + let project_path = storage_manager.service_build_path(&self.service_name)?; extract_tar_gz_data(self.data.as_slice(), &project_path).await?; @@ -182,7 +168,7 @@ impl Queued { info!("Moving built library"); - store_lib(libs_path, so_path, &self.id).await?; + store_lib(&storage_manager, so_path, &self.id).await?; let built = Built { id: self.id, @@ -248,8 +234,6 @@ async fn extract_tar_gz_data(data: impl Read, dest: impl AsRef) -> Result< let mut archive = Archive::new(tar); archive.set_overwrite(true); - fs::create_dir_all(&dest).await?; - // Clear directory first let mut entries = fs::read_dir(&dest).await?; while let Some(entry) = entries.next_entry().await? 
{ @@ -343,13 +327,13 @@ async fn run_pre_deploy_tests( } /// Store 'so' file in the libs folder -#[instrument(skip(storage_dir_path, so_path, id))] +#[instrument(skip(storage_manager, so_path, id))] async fn store_lib( - storage_dir_path: impl AsRef, + storage_manager: &StorageManager, so_path: impl AsRef, id: &Uuid, ) -> Result<()> { - let new_so_path = storage_dir_path.as_ref().join(id.to_string()); + let new_so_path = storage_manager.deployment_library_path(id)?; fs::rename(so_path, new_so_path).await?; @@ -364,7 +348,7 @@ mod tests { use tokio::fs; use uuid::Uuid; - use crate::error::TestError; + use crate::{deployment::storage_manager::StorageManager, error::TestError}; #[tokio::test] async fn extract_tar_gz_data() { @@ -478,22 +462,24 @@ ff0e55bda1ff01000000000000000000e0079c01ff12a55500280000", async fn store_lib() { let libs_dir = TempDir::new("lib-store").unwrap(); let libs_p = libs_dir.path(); + let storage_manager = StorageManager::new(libs_p.to_path_buf()); - let build_dir = TempDir::new("build-store").unwrap(); - let build_p = build_dir.path(); + let build_p = storage_manager.builds_path().unwrap(); let so_path = build_p.join("xyz.so"); let id = Uuid::new_v4(); fs::write(&so_path, "barfoo").await.unwrap(); - super::store_lib(&libs_p, &so_path, &id).await.unwrap(); + super::store_lib(&storage_manager, &so_path, &id) + .await + .unwrap(); // Old '.so' file gone? assert!(!so_path.exists()); assert_eq!( - fs::read_to_string(libs_p.join(id.to_string())) + fs::read_to_string(libs_p.join("shuttle-libs").join(id.to_string())) .await .unwrap(), "barfoo" diff --git a/deployer/src/deployment/run.rs b/deployer/src/deployment/run.rs index 2679ab2cc..65cea24f5 100644 --- a/deployer/src/deployment/run.rs +++ b/deployer/src/deployment/run.rs @@ -18,7 +18,10 @@ use tracing::{debug, debug_span, error, info, instrument, trace, Instrument}; use tracing_opentelemetry::OpenTelemetrySpanExt; use uuid::Uuid; -use super::{provisioner_factory, runtime_logger, KillReceiver, KillSender, RunReceiver, State}; +use super::{ + provisioner_factory, runtime_logger, storage_manager::StorageManager, KillReceiver, KillSender, + RunReceiver, State, +}; use crate::error::{Error, Result}; /// Run a task which takes runnable deploys from a channel and starts them up with a factory provided by the @@ -30,13 +33,10 @@ pub async fn task( abstract_factory: impl provisioner_factory::AbstractFactory, logger_factory: impl runtime_logger::Factory, active_deployment_getter: impl ActiveDeploymentsGetter, - artifacts_path: PathBuf, + storage_manager: StorageManager, ) { info!("Run task started"); - // The directory in which compiled '.so' files are stored. 
- let libs_path = artifacts_path.join("shuttle-libs"); - while let Some(built) = recv.recv().await { let id = built.id; @@ -44,6 +44,7 @@ pub async fn task( let kill_send = kill_send.clone(); let kill_recv = kill_send.subscribe(); + let storage_manager = storage_manager.clone(); let port = match pick_unused_port() { Some(port) => port, @@ -66,7 +67,12 @@ pub async fn task( } }; let mut factory = match abstract_factory - .get_factory(service_name, built.service_id) + .get_factory( + service_name, + built.service_id, + built.id, + storage_manager.clone(), + ) .await { Ok(factory) => factory, @@ -95,8 +101,6 @@ pub async fn task( Err(err) => start_crashed_cleanup(&id, err), }; - let libs_path = libs_path.clone(); - tokio::spawn(async move { let parent_cx = global::get_text_map_propagator(|propagator| { propagator.extract(&built.tracing_context) @@ -108,7 +112,7 @@ pub async fn task( if let Err(err) = built .handle( addr, - libs_path, + storage_manager, &mut factory, logger, kill_recv, @@ -197,12 +201,12 @@ pub struct Built { } impl Built { - #[instrument(skip(self, libs_path, factory, logger, kill_recv, kill_old_deployments, cleanup), fields(id = %self.id, state = %State::Loading))] + #[instrument(skip(self, storage_manager, factory, logger, kill_recv, kill_old_deployments, cleanup), fields(id = %self.id, state = %State::Loading))] #[allow(clippy::too_many_arguments)] async fn handle( self, address: SocketAddr, - libs_path: PathBuf, + storage_manager: StorageManager, factory: &mut dyn Factory, logger: Logger, kill_recv: KillReceiver, @@ -211,7 +215,8 @@ impl Built { + Send + 'static, ) -> Result<()> { - let service = load_deployment(&self.id, address, libs_path, factory, logger).await?; + let so_path = storage_manager.deployment_library_path(&self.id)?; + let service = load_deployment(address, so_path, factory, logger).await?; kill_old_deployments.await?; @@ -260,15 +265,13 @@ async fn run( } } -#[instrument(skip(id, addr, libs_path, factory, logger))] +#[instrument(skip(addr, so_path, factory, logger))] async fn load_deployment( - id: &Uuid, addr: SocketAddr, - libs_path: PathBuf, + so_path: PathBuf, factory: &mut dyn Factory, logger: Logger, ) -> Result { - let so_path = libs_path.join(id.to_string()); let loader = Loader::from_so_file(so_path)?; Ok(loader.load(factory, addr, logger).await?) 
@@ -278,7 +281,6 @@ async fn load_deployment( mod tests { use std::{ collections::BTreeMap, - fs, net::{Ipv4Addr, SocketAddr}, path::PathBuf, process::Command, @@ -287,6 +289,7 @@ mod tests { use shuttle_common::database; use shuttle_service::{Factory, Logger}; + use tempdir::TempDir; use tokio::{ sync::{broadcast, mpsc, oneshot}, task::JoinError, @@ -294,12 +297,11 @@ mod tests { }; use uuid::Uuid; - use crate::error::Error; + use crate::{deployment::storage_manager::StorageManager, error::Error}; use super::Built; const RESOURCES_PATH: &str = "tests/resources"; - const LIBS_PATH: &str = "/tmp/shuttle-libs-tests"; struct StubFactory; @@ -321,6 +323,21 @@ mod tests { fn get_service_name(&self) -> shuttle_service::ServiceName { panic!("no test should get the service name"); } + + fn get_build_path(&self) -> Result { + panic!("no test should get the build path"); + } + + fn get_storage_path(&self) -> Result { + panic!("no test should get the storage path"); + } + } + + fn get_storage_manager() -> StorageManager { + let tmp_dir = TempDir::new("shuttle_run_test").unwrap(); + let path = tmp_dir.into_path(); + + StorageManager::new(path) } fn get_logger(id: Uuid) -> Logger { @@ -342,7 +359,7 @@ mod tests { // This test uses the kill signal to make sure a service does stop when asked to #[tokio::test] async fn can_be_killed() { - let built = make_so_and_built("sleep-async"); + let (built, storage_manager) = make_so_and_built("sleep-async"); let id = built.id; let (kill_send, kill_recv) = broadcast::channel(1); let (cleanup_send, cleanup_recv) = oneshot::channel(); @@ -365,7 +382,7 @@ mod tests { built .handle( addr, - PathBuf::from(LIBS_PATH), + storage_manager, &mut factory, logger, kill_recv, @@ -390,7 +407,7 @@ mod tests { // This test does not use a kill signal to stop the service. 
Rather the service decided to stop on its own without errors #[tokio::test] async fn self_stop() { - let built = make_so_and_built("sleep-async"); + let (built, storage_manager) = make_so_and_built("sleep-async"); let (_kill_send, kill_recv) = broadcast::channel(1); let (cleanup_send, cleanup_recv) = oneshot::channel(); @@ -413,7 +430,7 @@ mod tests { built .handle( addr, - PathBuf::from(LIBS_PATH), + storage_manager, &mut factory, logger, kill_recv, @@ -432,7 +449,7 @@ mod tests { // Test for panics in Service::bind #[tokio::test] async fn panic_in_bind() { - let built = make_so_and_built("bind-panic"); + let (built, storage_manager) = make_so_and_built("bind-panic"); let (_kill_send, kill_recv) = broadcast::channel(1); let (cleanup_send, cleanup_recv): (oneshot::Sender<()>, _) = oneshot::channel(); @@ -455,7 +472,7 @@ mod tests { built .handle( addr, - PathBuf::from(LIBS_PATH), + storage_manager, &mut factory, logger, kill_recv, @@ -474,7 +491,7 @@ mod tests { // Test for panics in the main function #[tokio::test] async fn panic_in_main() { - let built = make_so_and_built("main-panic"); + let (built, storage_manager) = make_so_and_built("main-panic"); let (_kill_send, kill_recv) = broadcast::channel(1); let handle_cleanup = |_result| panic!("the service shouldn't even start"); @@ -485,7 +502,7 @@ mod tests { let result = built .handle( addr, - PathBuf::from(LIBS_PATH), + storage_manager, &mut factory, logger, kill_recv, @@ -513,13 +530,14 @@ mod tests { let handle_cleanup = |_result| panic!("no service means no cleanup"); let addr = SocketAddr::new(Ipv4Addr::LOCALHOST.into(), 8001); + let storage_manager = get_storage_manager(); let mut factory = StubFactory; let logger = get_logger(built.id); let result = built .handle( addr, - PathBuf::from(LIBS_PATH), + storage_manager, &mut factory, logger, kill_recv, @@ -538,7 +556,7 @@ mod tests { ); } - fn make_so_and_built(crate_name: &str) -> Built { + fn make_so_and_built(crate_name: &str) -> (Built, StorageManager) { let crate_dir: PathBuf = [RESOURCES_PATH, crate_name].iter().collect(); Command::new("cargo") @@ -559,18 +577,19 @@ mod tests { let id = Uuid::new_v4(); let so_path = crate_dir.join("target/release").join(lib_name); - let libs_path = PathBuf::from(LIBS_PATH); - fs::create_dir_all(&libs_path).unwrap(); - - let new_so_path = libs_path.join(id.to_string()); + let storage_manager = get_storage_manager(); + let new_so_path = storage_manager.deployment_library_path(&id).unwrap(); std::fs::copy(so_path, new_so_path).unwrap(); - Built { - id, - service_name: crate_name.to_string(), - service_id: Uuid::new_v4(), - tracing_context: Default::default(), - } + ( + Built { + id, + service_name: crate_name.to_string(), + service_id: Uuid::new_v4(), + tracing_context: Default::default(), + }, + storage_manager, + ) } } diff --git a/deployer/src/deployment/storage_manager.rs b/deployer/src/deployment/storage_manager.rs new file mode 100644 index 000000000..5a5fa1300 --- /dev/null +++ b/deployer/src/deployment/storage_manager.rs @@ -0,0 +1,69 @@ +use std::{fs, io, path::PathBuf}; + +use uuid::Uuid; + +/// Manager to take care of directories for storing project, services and deployment files +#[derive(Clone)] +pub struct StorageManager { + artifacts_path: PathBuf, +} + +impl StorageManager { + pub fn new(artifacts_path: PathBuf) -> Self { + Self { artifacts_path } + } + + /// Path of the directory that contains extracted service Cargo projects. 
+ pub fn builds_path(&self) -> Result { + let builds_path = self.artifacts_path.join("shuttle-builds"); + fs::create_dir_all(&builds_path)?; + + Ok(builds_path) + } + + /// Path for a specific service + pub fn service_build_path>(&self, service_name: S) -> Result { + let builds_path = self.builds_path()?.join(service_name.as_ref()); + fs::create_dir_all(&builds_path)?; + + Ok(builds_path) + } + + /// The directory in which compiled '.so' files are stored. + pub fn libraries_path(&self) -> Result { + let libs_path = self.artifacts_path.join("shuttle-libs"); + fs::create_dir_all(&libs_path)?; + + Ok(libs_path) + } + + /// Path to `.so` for a service + pub fn deployment_library_path(&self, deployment_id: &Uuid) -> Result { + let library_path = self.libraries_path()?.join(deployment_id.to_string()); + + Ok(library_path) + } + + /// Path of the directory to store user files + pub fn storage_path(&self) -> Result { + let storage_path = self.artifacts_path.join("shuttle-storage"); + fs::create_dir_all(&storage_path)?; + + Ok(storage_path) + } + + /// Path to folder for storing deployment files + pub fn deployment_storage_path>( + &self, + service_name: S, + deployment_id: &Uuid, + ) -> Result { + let storage_path = self + .storage_path()? + .join(service_name.as_ref()) + .join(deployment_id.to_string()); + fs::create_dir_all(&storage_path)?; + + Ok(storage_path) + } +} diff --git a/e2e/tests/integration/helpers/mod.rs b/e2e/tests/integration/helpers/mod.rs index b8a0949f1..f888830c8 100644 --- a/e2e/tests/integration/helpers/mod.rs +++ b/e2e/tests/integration/helpers/mod.rs @@ -42,12 +42,17 @@ shuttle-service = {{ path = "{}" }} shuttle-aws-rds = {{ path = "{}" }} shuttle-persist = {{ path = "{}" }} shuttle-shared-db = {{ path = "{}" }} -shuttle-secrets = {{ path = "{}" }}"#, +shuttle-secrets = {{ path = "{}" }} +shuttle-static-folder = {{ path = "{}" }}"#, WORKSPACE_ROOT.join("service").display(), WORKSPACE_ROOT.join("resources").join("aws-rds").display(), WORKSPACE_ROOT.join("resources").join("persist").display(), WORKSPACE_ROOT.join("resources").join("shared-db").display(), WORKSPACE_ROOT.join("resources").join("secrets").display(), + WORKSPACE_ROOT + .join("resources") + .join("static-folder") + .display(), ) .unwrap(); diff --git a/resources/static-folder/Cargo.toml b/resources/static-folder/Cargo.toml new file mode 100644 index 000000000..1eb75377a --- /dev/null +++ b/resources/static-folder/Cargo.toml @@ -0,0 +1,17 @@ +[package] +name = "shuttle-static-folder" +version = "0.7.3" +edition = "2021" +license = "Apache-2.0" +description = "Plugin to get a static folder at runtime on shuttle" +keywords = ["shuttle-service", "static-folder"] +# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html + +[dependencies] +async-trait = "0.1.56" +shuttle-service = { path = "../../service", version = "0.7.2", default-features = false } +tokio = { version = "1.19.2", features = ["rt"] } + +[dev-dependencies] +tempdir = "0.3.7" +tokio = { version = "1.19.2", features = ["macros"] } diff --git a/resources/static-folder/README.md b/resources/static-folder/README.md new file mode 100644 index 000000000..ae28f8e76 --- /dev/null +++ b/resources/static-folder/README.md @@ -0,0 +1,7 @@ +# Shuttle Static Folder +This plugin allows services to get the path to a static folder at runtime + +## Usage +Add `shuttle-static-folder` to the dependencies for your service. 
This resource can be used via the `shuttle_static_folder::StaticFolder` attribute to get a `PathBuf` with the location of the static folder.
+
+An example using the Axum framework can be found on [GitHub](https://github.com/shuttle-hq/examples/tree/main/axum/websocket)
diff --git a/resources/static-folder/src/lib.rs b/resources/static-folder/src/lib.rs
new file mode 100644
index 000000000..6cc620c04
--- /dev/null
+++ b/resources/static-folder/src/lib.rs
@@ -0,0 +1,111 @@
+use async_trait::async_trait;
+use shuttle_service::{Factory, ResourceBuilder};
+use std::{fs::rename, path::PathBuf};
+use tokio::runtime::Runtime;
+
+pub struct StaticFolder;
+
+#[async_trait]
+impl ResourceBuilder<PathBuf> for StaticFolder {
+    fn new() -> Self {
+        Self {}
+    }
+
+    async fn build(
+        self,
+        factory: &mut dyn Factory,
+        _runtime: &Runtime,
+    ) -> Result<PathBuf, shuttle_service::Error> {
+        let input_dir = factory.get_build_path()?.join("static");
+        let output_dir = factory.get_storage_path()?.join("static");
+
+        rename(input_dir, output_dir.clone())?;
+
+        Ok(output_dir)
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use std::fs::{self};
+
+    use async_trait::async_trait;
+    use shuttle_service::{Factory, ResourceBuilder};
+    use tempdir::TempDir;
+
+    use crate::StaticFolder;
+
+    struct MockFactory {
+        build_path: TempDir,
+        storage_path: TempDir,
+    }
+
+    impl MockFactory {
+        fn new() -> Self {
+            Self {
+                build_path: TempDir::new("build").unwrap(),
+                storage_path: TempDir::new("storage").unwrap(),
+            }
+        }
+    }
+
+    #[async_trait]
+    impl Factory for MockFactory {
+        async fn get_db_connection_string(
+            &mut self,
+            _db_type: shuttle_service::database::Type,
+        ) -> Result<String, shuttle_service::Error> {
+            panic!("no static folder test should try to get a db connection string")
+        }
+
+        async fn get_secrets(
+            &mut self,
+        ) -> Result<std::collections::BTreeMap<String, String>, shuttle_service::Error> {
+            panic!("no static folder test should try to get secrets")
+        }
+
+        fn get_service_name(&self) -> shuttle_service::ServiceName {
+            panic!("no static folder test should try to get the service name")
+        }
+
+        fn get_build_path(&self) -> Result<std::path::PathBuf, shuttle_service::Error> {
+            Ok(self.build_path.path().to_owned())
+        }
+
+        fn get_storage_path(&self) -> Result<std::path::PathBuf, shuttle_service::Error> {
+            Ok(self.storage_path.path().to_owned())
+        }
+    }
+
+    #[tokio::test]
+    async fn copies_folder() {
+        let mut factory = MockFactory::new();
+
+        let input_file_path = factory.build_path.path().join("static").join("note.txt");
+        fs::create_dir_all(input_file_path.parent().unwrap()).unwrap();
+        fs::write(input_file_path, "Hello, test!").unwrap();
+
+        let expected_file = factory.storage_path.path().join("static").join("note.txt");
+        assert!(!expected_file.exists(), "input file should not exist yet");
+
+        // Call plugin
+        let static_folder = StaticFolder;
+
+        let runtime = tokio::runtime::Runtime::new().unwrap();
+        let actual_folder = static_folder.build(&mut factory, &runtime).await.unwrap();
+
+        assert_eq!(
+            actual_folder,
+            factory.storage_path.path().join("static"),
+            "expect path to the static folder"
+        );
+        assert!(expected_file.exists(), "expected input file to be created");
+        assert_eq!(
+            fs::read_to_string(expected_file).unwrap(),
+            "Hello, test!",
+            "expected file content to match"
+        );
+
+        runtime.shutdown_background();
+    }
+}
diff --git a/service/src/lib.rs b/service/src/lib.rs
index a060a0dfd..770808703 100644
--- a/service/src/lib.rs
+++ b/service/src/lib.rs
@@ -213,6 +213,7 @@
 use std::collections::BTreeMap;
 use std::future::Future;
 use std::net::SocketAddr;
+use std::path::PathBuf;
 use std::pin::Pin;
 
 pub use async_trait::async_trait;
@@ -312,6 +313,12 @@ pub trait Factory: Send + Sync {
 
     /// Get the name for the service being deployed
     fn get_service_name(&self) -> ServiceName;
+
+    /// Get the path where the build files are stored for this service
+    fn get_build_path(&self) -> Result<PathBuf, crate::Error>;
+
+    /// Get the path where files can be stored for this deployment
+    fn get_storage_path(&self) -> Result<PathBuf, crate::Error>;
 }
 
 /// Used to get resources of type `T` from factories.
diff --git a/service/tests/integration/loader.rs b/service/tests/integration/loader.rs
index 352b5edf4..ba42c3807 100644
--- a/service/tests/integration/loader.rs
+++ b/service/tests/integration/loader.rs
@@ -60,6 +60,14 @@ impl Factory for DummyFactory {
     async fn get_secrets(&mut self) -> Result<BTreeMap<String, String>, Error> {
         panic!("did not expect any loader test to get secrets")
     }
+
+    fn get_build_path(&self) -> Result<PathBuf, Error> {
+        panic!("did not expect any loader test to get the build path")
+    }
+
+    fn get_storage_path(&self) -> Result<PathBuf, Error> {
+        panic!("did not expect any loader test to get the storage path")
+    }
 }
 
 #[test]
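For context on how the new resource is meant to be consumed, the README above only points at an external Axum example. The following is a minimal usage sketch and is not part of this diff: it assumes shuttle-service 0.7's `web-axum` feature with its `ShuttleAxum`/`SyncWrapper` return convention and `tower-http`'s `ServeDir`; the function name, route path, and error handling are placeholders.

```rust
use std::path::PathBuf;

use axum::{http::StatusCode, routing::get_service, Router};
use shuttle_service::ShuttleAxum;
use sync_wrapper::SyncWrapper;
use tower_http::services::ServeDir;

#[shuttle_service::main]
async fn app(
    // The attribute added by this PR; at runtime it resolves to the folder that
    // StaticFolder::build moved into the deployment's storage path.
    #[shuttle_static_folder::StaticFolder] static_folder: PathBuf,
) -> ShuttleAxum {
    // Serve the contents of the deployed `static` folder under `/static`.
    let router = Router::new().nest(
        "/static",
        get_service(ServeDir::new(static_folder)).handle_error(|err: std::io::Error| async move {
            (
                StatusCode::INTERNAL_SERVER_ERROR,
                format!("failed to serve static file: {err}"),
            )
        }),
    );

    Ok(SyncWrapper::new(router))
}
```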
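The new `StorageManager` centralizes the directory layout that queue.rs and run.rs previously built ad hoc. To make that layout explicit, here is a small test-style sketch, not part of this diff; it assumes it sits next to the existing tests inside the deployer's `deployment` module (where the private `storage_manager` module is reachable), and the service name is made up.

```rust
use tempdir::TempDir;
use uuid::Uuid;

use crate::deployment::storage_manager::StorageManager;

#[test]
fn storage_manager_layout() {
    let artifacts_dir = TempDir::new("artifacts").unwrap();
    let storage_manager = StorageManager::new(artifacts_dir.path().to_path_buf());
    let deployment_id = Uuid::new_v4();

    // shuttle-builds/<service> holds the extracted cargo project (Factory::get_build_path)
    let build_path = storage_manager.service_build_path("my-service").unwrap();
    assert!(build_path.ends_with("shuttle-builds/my-service"));

    // shuttle-libs/<deployment id> is where the compiled .so lands after a build
    let library_path = storage_manager
        .deployment_library_path(&deployment_id)
        .unwrap();
    assert!(library_path.ends_with(format!("shuttle-libs/{deployment_id}")));

    // shuttle-storage/<service>/<deployment id> is what user code gets via Factory::get_storage_path
    let storage_path = storage_manager
        .deployment_storage_path("my-service", &deployment_id)
        .unwrap();
    assert!(storage_path.ends_with(format!("shuttle-storage/my-service/{deployment_id}")));
}
```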