This repository has been archived by the owner on Dec 7, 2023. It is now read-only.
Commit: feat(server): initial release of automaat-server
> An Automaat server implementation.
>
> This server performs several important tasks in the Automaat workflow:
>
> * Uses a database to store the state of the Automaat instance.
> * Exposes a GraphQL API to fetch and create tasks.
> * Adds some abstractions (such as "pipelines") for ease of use.
> * Provides a task runner system.
>
> By combining this server with `automaat-web-client`, you can have your
> own Automaat instance running in your environment.

Note that this implementation still requires a lot of work, but as a first MVP, it gets the job done.
Showing 28 changed files with 2,221 additions and 4 deletions.
src/server/Cargo.toml
@@ -0,0 +1,64 @@
```toml
[package]
name = "automaat-server"
version = "0.1.0"
authors = ["Jean Mertz <[email protected]>"]
license = "MIT/Apache-2.0"
description = "HTTP API for the Automaat automation utility."
repository = "https://github.com/blendle/automaat"
keywords = ["automaat", "automation"]
categories = ["command-line-utilities"]
readme = "README.md"
include = ["Cargo.toml", "src/**/*.rs", "migrations/**/*.sql", "README.md", "LICENSE-APACHE", "LICENSE-MIT"]
edition = "2018"

[badges]
travis-ci = { repository = "blendle/automaat" }

[dependencies]
automaat-core = { version = "0.1", path = "../core" }
chrono = { version = "0.4", features = ["serde"] }
diesel = { version = "1.4", default-features = false, features = ["postgres", "r2d2", "chrono", "serde_json"] }
diesel-derive-enum = { version = "0.4", features = ["postgres"] }
diesel_migrations = "1.4"
juniper = { version = "0.12", features = ["chrono"] }
juniper_rocket = "0.3"
juniper_serde = "0.1"
paste = "0.1"
rocket = "0.4"
rocket_contrib = { version = "0.4", default_features = false, features = ["diesel_postgres_pool", "json"] }
rocket_cors = "0.5.0"
serde = { version = "1.0", default-features = false, features = ["derive"] }
serde_json = "1.0"

[dependencies.processor-git-clone-v1]
package = "automaat-processor-git-clone"
version = "0.1"
path = "../processors/git-clone"
features = ["juniper"]

[dependencies.processor-print-output-v1]
package = "automaat-processor-print-output"
version = "0.1"
path = "../processors/print-output"
features = ["juniper"]

[dependencies.processor-redis-command-v1]
package = "automaat-processor-redis-command"
version = "0.1"
path = "../processors/redis-command"
features = ["juniper"]

[dependencies.processor-shell-command-v1]
package = "automaat-processor-shell-command"
version = "0.1"
path = "../processors/shell-command"
features = ["juniper"]

[dependencies.processor-string-regex-v1]
package = "automaat-processor-string-regex"
version = "0.1"
path = "../processors/string-regex"
features = ["juniper"]

[dev-dependencies]
version-sync = "0.8"
```
src/server/LICENSE-APACHE
@@ -0,0 +1 @@
../../LICENSE-APACHE
src/server/LICENSE-MIT
@@ -0,0 +1 @@
../../LICENSE-MIT
src/server/README.md
@@ -0,0 +1,21 @@
# Automaat Server

🚧 _Work In Progress_ 🚧

## Database Configuration

You need to set the following environment variables:

```shell
# needed for the Rocket webserver to connect
ROCKET_DATABASES='{db={url="postgres://postgres@localhost"}}'

# needed for the Diesel ORM to run migrations
DATABASE_URL="postgres://postgres@localhost"
```

## See Also

* [Rocket environment variables](https://rocket.rs/v0.4/guide/configuration/#environment-variables)
* [Rocket database configuration](https://api.rocket.rs/v0.4/rocket_contrib/databases/index.html#configuration)
* [Diesel getting started](https://diesel.rs/guides/getting-started/)
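For illustration, here is a minimal, hypothetical sketch (not part of this commit) of how the `DATABASE_URL` variable described in the README could be used to run the embedded Diesel migrations at startup. It assumes the `diesel` and `diesel_migrations` 1.4 crates listed in Cargo.toml above; the server's actual startup code is not shown in this diff.

```rust
// Hypothetical sketch: connect using DATABASE_URL and apply the embedded
// migrations from the `migrations/` directory. Not part of this commit.
#[macro_use]
extern crate diesel_migrations;

use diesel::pg::PgConnection;
use diesel::prelude::*;

// Embeds every SQL file under `migrations/` into the binary at compile time.
embed_migrations!();

fn main() {
    let database_url =
        std::env::var("DATABASE_URL").expect("DATABASE_URL must be set (see README)");

    let conn = PgConnection::establish(&database_url)
        .expect("failed to connect to Postgres");

    // Runs any migrations that have not been applied yet.
    embedded_migrations::run(&conn).expect("running migrations failed");
}
```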
Empty file.
1 change: 1 addition & 0 deletions
src/server/migrations/2019-05-07-162759_create_pipelines_table/down.sql
@@ -0,0 +1 @@
```sql
DROP TABLE pipelines;
```
5 changes: 5 additions & 0 deletions
src/server/migrations/2019-05-07-162759_create_pipelines_table/up.sql
@@ -0,0 +1,5 @@
```sql
CREATE TABLE pipelines (
  id Serial PRIMARY KEY,
  name VarChar NOT NULL UNIQUE,
  description Text NULL
);
```
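As a rough illustration of how this table is consumed from the Rust side (the server's actual `schema.rs` and model definitions are not shown in this diff), a Diesel mapping for `pipelines` might look like the sketch below; all names here are assumptions derived from the migration above.

```rust
// Hypothetical sketch, not part of this diff: a Diesel schema and model
// matching the `pipelines` migration above.
#[macro_use]
extern crate diesel;

table! {
    pipelines (id) {
        id -> Int4,                    // Serial
        name -> Varchar,               // VarChar NOT NULL UNIQUE
        description -> Nullable<Text>, // Text NULL
    }
}

// Field order matches the column order in the table! definition.
#[derive(Queryable, Debug)]
pub struct Pipeline {
    pub id: i32,
    pub name: String,
    pub description: Option<String>,
}

fn main() {}
```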
1 change: 1 addition & 0 deletions
src/server/migrations/2019-05-07-162809_create_steps_table/down.sql
@@ -0,0 +1 @@
```sql
DROP TABLE steps;
```
12 changes: 12 additions & 0 deletions
src/server/migrations/2019-05-07-162809_create_steps_table/up.sql
@@ -0,0 +1,12 @@
```sql
CREATE TABLE steps (
  id Serial PRIMARY KEY,
  name VarChar NOT NULL,
  description Text NULL,
  processor Jsonb NOT NULL,
  position Integer NOT NULL,
  pipeline_id Integer REFERENCES pipelines ON DELETE CASCADE,

  UNIQUE(position, pipeline_id)
);

CREATE INDEX ON steps (pipeline_id);
```
1 change: 1 addition & 0 deletions
src/server/migrations/2019-05-09-135514_create_variables_table/down.sql
@@ -0,0 +1 @@
```sql
DROP TABLE variables;
```
8 changes: 8 additions & 0 deletions
src/server/migrations/2019-05-09-135514_create_variables_table/up.sql
@@ -0,0 +1,8 @@
```sql
CREATE TABLE variables (
  id Serial PRIMARY KEY,
  key VarChar NOT NULL,
  description Text NULL,
  pipeline_id Integer REFERENCES pipelines ON DELETE CASCADE
);

CREATE INDEX ON variables (pipeline_id);
```
2 changes: 2 additions & 0 deletions
src/server/migrations/2019-05-09-141151_create_tasks_table/down.sql
@@ -0,0 +1,2 @@
```sql
DROP TABLE tasks;
DROP TYPE IF EXISTS TaskStatus;
```
13 changes: 13 additions & 0 deletions
src/server/migrations/2019-05-09-141151_create_tasks_table/up.sql
@@ -0,0 +1,13 @@
```sql
CREATE TYPE TaskStatus AS ENUM ('scheduled', 'pending', 'running', 'failed', 'ok');

CREATE TABLE tasks (
  id Serial PRIMARY KEY,
  name VarChar NOT NULL,
  description Text NULL,
  status TaskStatus NOT NULL,
  pipeline_reference Integer REFERENCES pipelines ON DELETE SET NULL,

  UNIQUE(name, pipeline_reference)
);

CREATE INDEX ON tasks (pipeline_reference);
```
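The `TaskStatus` Postgres enum created here is presumably what the `diesel-derive-enum` dependency in Cargo.toml is for. The real Rust-side definition is not shown in this diff; the following is only a hedged sketch of how such a mapping is usually declared, and the naming details may differ from the server's actual code.

```rust
// Hypothetical sketch, not part of this diff: mirroring the TaskStatus
// Postgres enum with diesel-derive-enum so it can be used in Diesel queries.
#[macro_use]
extern crate diesel;

use diesel_derive_enum::DbEnum;

// The derive generates a Diesel SQL type for the enum (conventionally named
// after the enum) that the `status` column in a `tasks` table! definition
// would reference; variants map to the lowercase values from the migration.
#[derive(Debug, Clone, Copy, PartialEq, Eq, DbEnum)]
pub enum TaskStatus {
    Scheduled,
    Pending,
    Running,
    Failed,
    Ok,
}

fn main() {}
```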
2 changes: 2 additions & 0 deletions
src/server/migrations/2019-05-09-141606_create_task_steps_table/down.sql
@@ -0,0 +1,2 @@
```sql
DROP TABLE task_steps;
DROP TYPE IF EXISTS TaskStepStatus;
```
16 changes: 16 additions & 0 deletions
src/server/migrations/2019-05-09-141606_create_task_steps_table/up.sql
@@ -0,0 +1,16 @@
```sql
CREATE TYPE TaskStepStatus AS ENUM ('initialized', 'pending', 'running', 'failed', 'cancelled', 'ok');

CREATE TABLE task_steps (
  id Serial PRIMARY KEY,
  name VarChar NOT NULL,
  description Text NULL,
  processor Jsonb NOT NULL,
  position Integer NOT NULL,
  started_at Timestamp NULL,
  finished_at Timestamp NULL,
  status TaskStepStatus NOT NULL DEFAULT 'pending',
  output Text NULL,
  task_id Integer REFERENCES tasks ON DELETE CASCADE
);

CREATE INDEX ON task_steps (task_id);
```
GraphQL schema module (`crate::graphql`)
@@ -0,0 +1,121 @@
```rust
use crate::resources::{
    CreatePipelineInput, CreateTaskFromPipelineInput, NewPipeline, NewTask, Pipeline,
    SearchPipelineInput, Task, VariableValue,
};
use crate::Database;
use diesel::prelude::*;
use juniper::{object, Context, FieldError, FieldResult, RootNode, ID};
use std::convert::TryFrom;

impl Context for Database {}

pub(crate) type Schema = RootNode<'static, QueryRoot, MutationRoot>;
pub(crate) struct QueryRoot;
pub(crate) struct MutationRoot;

#[object(Context = Database)]
impl QueryRoot {
    /// Return a list of pipelines.
    ///
    /// You can optionally filter the returned set of pipelines by providing the
    /// `SearchPipelineInput` value.
    fn pipelines(
        context: &Database,
        search: Option<SearchPipelineInput>,
    ) -> FieldResult<Vec<Pipeline>> {
        use crate::schema::pipelines::dsl::*;
        let conn = &context.0;

        let mut query = pipelines.order(id).into_boxed();

        if let Some(search) = &search {
            if let Some(search_name) = &search.name {
                query = query.filter(name.ilike(format!("%{}%", search_name)));
            };

            if let Some(search_description) = &search.description {
                query = query.or_filter(description.ilike(format!("%{}%", search_description)));
            };
        };

        query.load(conn).map_err(Into::into)
    }

    /// Return a list of tasks.
    fn tasks(context: &Database) -> FieldResult<Vec<Task>> {
        use crate::schema::tasks::dsl::*;

        tasks.order(id).load(&**context).map_err(Into::into)
    }

    /// Return a single pipeline, based on the pipeline ID.
    ///
    /// This query can return `null` if no pipeline is found matching the
    /// provided ID.
    fn pipeline(context: &Database, id: ID) -> FieldResult<Option<Pipeline>> {
        use crate::schema::pipelines::dsl::{id as pid, pipelines};

        pipelines
            .filter(pid.eq(id.parse::<i32>()?))
            .first(&**context)
            .optional()
            .map_err(Into::into)
    }

    /// Return a single task, based on the task ID.
    ///
    /// This query can return `null` if no task is found matching the
    /// provided ID.
    fn task(context: &Database, id: ID) -> FieldResult<Option<Task>> {
        use crate::schema::tasks::dsl::{id as tid, tasks};

        tasks
            .filter(tid.eq(id.parse::<i32>()?))
            .first(&**context)
            .optional()
            .map_err(Into::into)
    }
}

#[object(Context = Database)]
impl MutationRoot {
    /// Create a new pipeline.
    fn createPipeline(context: &Database, pipeline: CreatePipelineInput) -> FieldResult<Pipeline> {
        NewPipeline::try_from(&pipeline)?
            .create(context)
            .map_err(Into::into)
    }

    /// Create a task from an existing pipeline ID.
    ///
    /// Once the task is created, it will be scheduled to run immediately.
    fn createTaskFromPipeline(
        context: &Database,
        task: CreateTaskFromPipelineInput,
    ) -> FieldResult<Task> {
        let pipeline: Pipeline = {
            use crate::schema::pipelines::dsl::*;

            pipelines
                .filter(id.eq(task.pipeline_id.parse::<i32>()?))
                .first(&**context)
        }?;

        let variable_values = task
            .variables
            .into_iter()
            .map(Into::into)
            .collect::<Vec<VariableValue>>();

        if let Some(variable) = pipeline.get_missing_variable(context, &variable_values)? {
            return Err(format!(r#"missing variable: "{}""#, variable.key).into());
        };

        let mut new_task = NewTask::create_from_pipeline(context, &pipeline, &variable_values)
            .map_err(Into::<FieldError>::into)?;

        // TODO: when we have scheduling, we probably want this to be optional,
        // so that a task isn't always scheduled instantly.
        new_task.enqueue(context).map_err(Into::into)
    }
}
```
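To make the schema above concrete, here is a hypothetical example (not part of this commit) of the JSON body a client could send to the GraphQL endpoint to call `createTaskFromPipeline`. The input and selection field names are assumptions inferred from the Rust types above and may differ from the schema juniper actually generates.

```rust
// Hypothetical request body for the GraphQL mutation endpoint, built with
// serde_json (already a dependency). Field names are inferred, not verified
// against the generated schema.
use serde_json::json;

fn main() {
    let body = json!({
        "query": r#"
            mutation CreateTask($task: CreateTaskFromPipelineInput!) {
                createTaskFromPipeline(task: $task) {
                    id
                }
            }
        "#,
        "variables": {
            "task": {
                "pipelineId": "1",
                "variables": [
                    { "key": "example variable", "value": "example value" }
                ]
            }
        }
    });

    // A client would send this as the body of `POST /graphql`
    // (see the route handlers below).
    println!("{}", serde_json::to_string_pretty(&body).unwrap());
}
```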
Rocket HTTP route handlers (GraphQL and health endpoints)
@@ -0,0 +1,67 @@
```rust
#![allow(clippy::single_match_else)]

use crate::graphql::Schema;
use crate::Database;
use juniper_rocket::{graphiql_source, playground_source, GraphQLRequest, GraphQLResponse};
use rocket::response::content::Html;
use rocket::State;
use rocket_contrib::json::Json;
use serde::{Deserialize, Serialize};

/// See: <https://tools.ietf.org/html/draft-inadarei-api-health-check-03>
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "lowercase")]
pub(crate) enum Status {
    Pass,
    _Warn,
    _Fail,
}

#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all(serialize = "camelCase"))]
pub(crate) struct Health {
    status: Status,
    version: &'static str,
    release_id: &'static str,
}

#[get("/graphql/graphiql")]
pub(super) fn graphiql() -> Html<String> {
    graphiql_source("/graphql")
}

#[get("/graphql/playground")]
pub(super) fn playground() -> Html<String> {
    playground_source("/graphql")
}

#[get("/graphql?<request>")]
#[allow(clippy::needless_pass_by_value)]
pub(super) fn query(
    db: Database,
    request: GraphQLRequest,
    schema: State<'_, Schema>,
) -> GraphQLResponse {
    request.execute(&schema, &db)
}

#[post("/graphql", data = "<request>")]
#[allow(clippy::needless_pass_by_value)]
pub(super) fn mutate(
    db: Database,
    request: GraphQLRequest,
    schema: State<'_, Schema>,
) -> GraphQLResponse {
    request.execute(&schema, &db)
}

#[get("/health")]
pub(super) const fn health() -> Json<Health> {
    let health = Health {
        status: Status::Pass,
        version: "TODO",
        release_id: "TODO",
    };

    Json(health)
}
```
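As a stand-alone illustration of what the `/health` handler above serializes to, given its serde attributes (lowercase enum variants, camelCase keys on serialization), the following self-contained sketch prints the expected JSON shape. It is not part of this commit, and the field values are placeholders.

```rust
// Stand-alone sketch of the /health response shape implied by the serde
// attributes above. Values are placeholders; only Serialize is derived here.
use serde::Serialize;

#[derive(Debug, Serialize)]
#[serde(rename_all = "lowercase")]
enum Status {
    Pass,
    Warn,
    Fail,
}

#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
struct Health {
    status: Status,
    version: &'static str,
    release_id: &'static str,
}

fn main() {
    let health = Health {
        status: Status::Pass,
        version: "0.1.0",
        release_id: "dev",
    };

    // Prints: {"status":"pass","version":"0.1.0","releaseId":"dev"}
    println!("{}", serde_json::to_string(&health).unwrap());
}
```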