Skip to content

update permission check util #654

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Merged
merged 2 commits into from
Oct 24, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 3 additions & 3 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -41,16 +41,16 @@ serde_json = "1.0.132"
tracing = "0.1.40"
tracing-subscriber = "0.3.18"
tracing-appender = "0.2"
thiserror = "1.0.64"
thiserror = "1.0.65"
rand = "0.8.5"
smallvec = "1.13.2"
tokio = "1.40.0"
tokio = "1.41.0"
tokio-stream = "0.1.16"
tokio-test = "0.4.4"
clap = "4.5.20"
async-trait = "0.1.83"
async-stream = "0.3.6"
bytes = "1.7.2"
bytes = "1.8.0"
memchr = "2.7.4"
chrono = "0.4.38"
sha1 = "0.10.6"
Expand Down
2 changes: 1 addition & 1 deletion docker/mono-pg-dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,6 @@ EXPOSE 5432

# Add the database initialization script to the container
# When the container starts, PostgreSQL will automatically execute all .sql files in the docker-entrypoint-initdb.d/ directory
COPY ./sql/postgres/pg_20240923__init.sql /docker-entrypoint-initdb.d/
COPY ./sql/postgres/pg_20241023__init.sql /docker-entrypoint-initdb.d/

CMD ["postgres"]
2 changes: 1 addition & 1 deletion jupiter/src/storage/init.rs
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ async fn setup_sql(conn: &DatabaseConnection) -> Result<(), TransactionError<DbE
let backend = txn.get_database_backend();

// `include_str!` will expand the file while compiling, so `.sql` is not needed after that
const SETUP_SQL: &str = include_str!("../../../sql/sqlite/sqlite_20240923_init.sql");
const SETUP_SQL: &str = include_str!("../../../sql/sqlite/sqlite_20241023_init.sql");
txn.execute(Statement::from_string(backend, SETUP_SQL)).await?;
Ok(())
})
Expand Down
3 changes: 0 additions & 3 deletions mono/src/api/mod.rs
Original file line number Diff line number Diff line change
Expand Up @@ -114,11 +114,8 @@ pub mod util {
state: State<MonoApiServiceState>,
) -> Result<(), saturn::context::Error> {
let entities = get_entitystore(path.into(), state).await;
let crate_root = env!("CARGO_MANIFEST_DIR");
let cedar_context = CedarContext::new(
entities,
format!("{}/../saturn/mega.cedarschema", crate_root),
format!("{}/../saturn/mega_policies.cedar", crate_root),
)
.unwrap();
cedar_context.is_authorized(
Expand Down
24 changes: 10 additions & 14 deletions mono/src/api/mr_router.rs
Original file line number Diff line number Diff line change
Expand Up @@ -37,25 +37,21 @@ async fn merge(
let storage = state.context.services.mono_storage.clone();
if let Some(model) = storage.get_open_mr_by_link(&mr_link).await.unwrap() {
let path = model.path.clone();
if util::check_permissions(
let _ = util::check_permissions(
&user.name,
// "admin",
&path,
ActionEnum::ApproveMergeRequest,
state.clone(),
)
.await
.is_ok()
{
ApiRequestEvent::notify(ApiType::MergeRequest, &state.0.context.config);
let res = state.monorepo().merge_mr(&mut model.into()).await;
let res = match res {
Ok(_) => CommonResult::success(None),
Err(err) => CommonResult::failed(&err.to_string()),
};
ApiRequestEvent::notify(ApiType::MergeDone, &state.0.context.config);
return Ok(Json(res));
}
.await;
ApiRequestEvent::notify(ApiType::MergeRequest, &state.0.context.config);
let res = state.monorepo().merge_mr(&mut model.into()).await;
let res = match res {
Ok(_) => CommonResult::success(None),
Err(err) => CommonResult::failed(&err.to_string()),
};
ApiRequestEvent::notify(ApiType::MergeDone, &state.0.context.config);
return Ok(Json(res));
}
Ok(Json(CommonResult::failed("not found")))
}
Expand Down
6 changes: 3 additions & 3 deletions moon/src/app/(dashboard)/mr/page.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -66,15 +66,15 @@ export default function MergeRequestPage() {
const getDescription = (item: MrInfoItem) => {
switch (item.status) {
case 'open':
return `MergeRequest opened on ${format(fromUnixTime(Number(item.open_timestamp)), 'MMM d')} by Admin`;
return `MergeRequest opened by Admin ${formatDistance(fromUnixTime(item.open_timestamp), new Date(), { addSuffix: true })} `;
case 'merged':
if (item.merge_timestamp !== null) {
return `MergeRequest by Admin was merged ${formatDistance(fromUnixTime(item.merge_timestamp), new Date(), { addSuffix: true })}`;
return `MergeRequest merged by Admin ${formatDistance(fromUnixTime(item.merge_timestamp), new Date(), { addSuffix: true })}`;
} else {
return "";
}
case 'closed':
return (`MR ${item.mr_link} by Admin was closed ${formatDistance(fromUnixTime(item.updated_at), new Date(), { addSuffix: true })}`)
return (`MR ${item.mr_link} closed by Admin ${formatDistance(fromUnixTime(item.updated_at), new Date(), { addSuffix: true })}`)
}
}

Expand Down
14 changes: 4 additions & 10 deletions saturn/src/context.rs
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@ use cedar_policy::{
PolicySetError, Request, Schema, SchemaError, ValidationMode, Validator,
};
use itertools::Itertools;
use std::path::PathBuf;
use thiserror::Error;

use crate::{entitystore::EntityStore, util::EntityUid};
Expand Down Expand Up @@ -46,16 +45,11 @@ pub enum Error {
impl CedarContext {
pub fn new(
entities: EntityStore,
schema_path: impl Into<PathBuf>,
policies_path: impl Into<PathBuf>,
) -> Result<Self, ContextError> {
let schema_path = schema_path.into();
let policies_path = policies_path.into();

let schema_file = std::fs::File::open(schema_path)?;
let (schema, _) = Schema::from_cedarschema_file(schema_file).unwrap();
let policy_src = std::fs::read_to_string(policies_path)?;
let policies = policy_src.parse()?;
let schema_content = include_str!("../mega.cedarschema");
let policy_content = include_str!("../mega_policies.cedar");
let (schema, _) = Schema::from_cedarschema_str(schema_content).unwrap();
let policies = policy_content.parse()?;
let validator = Validator::new(schema.clone());
let output = validator.validate(&policies, ValidationMode::default());

Expand Down
2 changes: 1 addition & 1 deletion saturn/src/lib.rs
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ mod test {
}

fn load_context(entities: EntityStore) -> CedarContext {
CedarContext::new(entities, "./mega.cedarschema", "./mega_policies.cedar").unwrap()
CedarContext::new(entities).unwrap()
}

#[test]
Expand Down
24 changes: 10 additions & 14 deletions saturn/src/objects.rs
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ impl From<User> for Entity {
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct UserGroup {
pub struct UserGroup {
euid: EntityUid,
parents: HashSet<EntityUid>,
}
Expand Down Expand Up @@ -63,7 +63,7 @@ impl From<Repo> for Entity {
(
"readers",
format!("{}", value.readers.as_ref()).parse().unwrap(),
)
),
]
.into_iter()
.map(|(x, v)| (x.into(), v))
Expand All @@ -84,12 +84,10 @@ pub struct MergeRequest {

impl From<MergeRequest> for Entity {
fn from(value: MergeRequest) -> Entity {
let attrs = [
("repo", format!("{}", value.repo.as_ref()).parse().unwrap()),
]
.into_iter()
.map(|(x, v)| (x.into(), v))
.collect();
let attrs = [("repo", format!("{}", value.repo.as_ref()).parse().unwrap())]
.into_iter()
.map(|(x, v)| (x.into(), v))
.collect();

Entity::new(
value.euid.into(),
Expand All @@ -109,12 +107,10 @@ pub struct Issue {

impl From<Issue> for Entity {
fn from(value: Issue) -> Entity {
let attrs = [
("repo", format!("{}", value.repo.as_ref()).parse().unwrap()),
]
.into_iter()
.map(|(x, v)| (x.into(), v))
.collect();
let attrs = [("repo", format!("{}", value.repo.as_ref()).parse().unwrap())]
.into_iter()
.map(|(x, v)| (x.into(), v))
.collect();

Entity::new(
value.euid.into(),
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ CREATE TABLE IF NOT EXISTS "mega_tag" (
CREATE TABLE IF NOT EXISTS "mega_mr" (
"id" BIGINT PRIMARY KEY,
"mr_link" VARCHAR(40) NOT NULL,
"title" VARCHAR(80) NOT NULL,
"title" TEXT NOT NULL,
"merge_date" TIMESTAMP,
"status" VARCHAR(20) NOT NULL,
"path" TEXT NOT NULL,
Expand Down Expand Up @@ -291,4 +291,14 @@ CREATE TABLE IF NOT EXISTS "ssh_keys" (
"created_at" TIMESTAMP NOT NULL
);
CREATE INDEX "idx_user_id" ON "ssh_keys" ("user_id");
CREATE INDEX "idx_ssh_key_finger" ON "ssh_keys" ((left(finger, 8)));
CREATE INDEX "idx_ssh_key_finger" ON "ssh_keys" ((left(finger, 8)));


CREATE TABLE IF NOT EXISTS "access_token" (
"id" BIGINT PRIMARY KEY,
"user_id" BIGINT NOT NULL,
"token" TEXT NOT NULL,
"created_at" TIMESTAMP NOT NULL
);
CREATE INDEX "idx_token_user_id" ON "access_token" ("user_id");
CREATE INDEX "idx_token" ON "access_token" ((left(token, 8)));
Original file line number Diff line number Diff line change
Expand Up @@ -290,4 +290,13 @@ CREATE TABLE IF NOT EXISTS "ssh_keys" (
"created_at" TIMESTAMP NOT NULL
);
CREATE INDEX "idx_user_id" ON "ssh_keys" ("user_id");
CREATE INDEX "idx_ssh_key_finger" ON "ssh_keys" ("finger");
CREATE INDEX "idx_ssh_key_finger" ON "ssh_keys" ("finger");

CREATE TABLE IF NOT EXISTS "access_token" (
"id" BIGINT PRIMARY KEY,
"user_id" BIGINT NOT NULL,
"token" TEXT NOT NULL,
"created_at" TIMESTAMP NOT NULL
);
CREATE INDEX "idx_token_user_id" ON "access_token" ("user_id");
CREATE INDEX "idx_token" ON "access_token" ("token");
Loading