Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -17,3 +17,4 @@ Cargo.lock
*.docker-compose.yml
targets.json

logs/*
3 changes: 3 additions & 0 deletions .vscode/settings.json
Original file line number Diff line number Diff line change
@@ -1,5 +1,8 @@
{
"rust-analyzer.rustfmt.extraArgs": [
"+nightly"
],
"cSpell.words": [
"chrono"
]
}
7 changes: 7 additions & 0 deletions bin/Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,9 @@ tracing-subscriber.workspace = true
# misc
clap.workspace = true

#chrono
chrono = "0.4.38"

[[bin]]
name = "commit-boost"
path = "commit_boost.rs"
Expand All @@ -40,3 +43,7 @@ path = "default_pbs.rs"
[[bin]]
name = "signer-module"
path = "signer_module.rs"

[[bin]]
name = "logger"
path = "logger.rs"
4 changes: 4 additions & 0 deletions bin/default_pbs.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
use cb_common::{config::load_pbs_config, utils::initialize_tracing_log};
use cb_logging::initialize_logging;
use cb_pbs::{DefaultBuilderApi, PbsService, PbsState};

#[tokio::main]
Expand All @@ -12,6 +13,9 @@ async fn main() {

// TODO: handle errors
let pbs_config = load_pbs_config().expect("failed to load pbs config");

initialize_logging(pbs_config);

let state = PbsState::<()>::new(pbs_config);

PbsService::init_metrics();
Expand Down
128 changes: 128 additions & 0 deletions bin/logger.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,128 @@
use chrono::{Utc, Duration as ChronoDuration};
use std::fs::{self, OpenOptions};
use std::io::{BufWriter, copy};
use tokio::time::{sleep, Duration};
use std::process::{Command, Stdio};
use tokio::task;
use std::time::SystemTime;
use cb_common::config::load_logger_config;
use std::os::unix::fs::PermissionsExt;

#[tokio::main]
async fn main() {
    // Standalone log-collection service: periodically appends `docker compose logs`
    // output to per-day files, and prunes files older than the retention window.
    let logger_config = load_logger_config().expect("failed to load logger config");

    let retention_duration =
        ChronoDuration::days(logger_config.retention_period_days.try_into().unwrap());
    let collection_interval = Duration::from_secs(logger_config.log_collection_interval_secs);
    let log_dir = logger_config.log_dir.clone();

    // Ensure the log directory exists and is group-writable (0o775) so containers
    // sharing the volume can also write to it.
    if let Err(e) = fs::create_dir_all(&log_dir) {
        eprintln!("Failed to create log directory: {}", e);
        return;
    }

    if let Err(e) = fs::set_permissions(&log_dir, fs::Permissions::from_mode(0o775)) {
        eprintln!("Failed to set permissions for log directory: {}", e);
        return;
    }

    // Retention task: once a day, delete log files older than the retention window.
    let log_dir_retention = log_dir.clone();
    let retention_handle = task::spawn(async move {
        loop {
            sleep(Duration::from_secs(86400)).await; // Sleep for one day
            let cutoff = Utc::now() - retention_duration;
            let cutoff_system_time = SystemTime::from(cutoff);

            // Best effort: unreadable entries/metadata are simply skipped.
            if let Ok(entries) = fs::read_dir(&log_dir_retention) {
                for entry in entries.flatten() {
                    if let Ok(modified) = entry.metadata().and_then(|m| m.modified()) {
                        if modified < cutoff_system_time {
                            let _ = fs::remove_file(entry.path());
                        }
                    }
                }
            }
        }
    });

    // Collection task: every `collection_interval`, append the container logs
    // produced since the *previous* collection to the current day's file.
    let log_dir_collection = log_dir.clone();
    let collection_handle = task::spawn(async move {
        // Needed for `writer.flush()`; scoped to this task only.
        use std::io::Write;

        // Start of the window passed to `docker compose logs --since`.
        // BUG FIX: the original passed the *current* time, which made every
        // collection window (almost) empty; we must pass the time of the
        // previous successful collection instead.
        let mut last_collected = Utc::now();

        loop {
            // Sleeping at the top of the loop also rate-limits the error paths
            // below, which previously `continue`d into a busy loop.
            sleep(collection_interval).await;

            let now = Utc::now();
            let log_file_name =
                format!("{}/log_{}.log", log_dir_collection, now.format("%Y-%m-%d"));

            // The directory may have been removed out from under us; recreate it.
            if let Err(e) = fs::create_dir_all(&log_dir_collection) {
                eprintln!("Failed to create log directory: {}", e);
                continue;
            }

            let file = match OpenOptions::new().append(true).create(true).open(&log_file_name) {
                Ok(file) => file,
                Err(e) => {
                    eprintln!("Failed to open log file: {}", e);
                    continue;
                }
            };

            let mut writer = BufWriter::new(file);
            let since_time = format!("{}", last_collected.format("%Y-%m-%dT%H:%M:%SZ"));

            // NOTE(review): std::process::Command and std::io::copy block the
            // async executor thread; consider tokio::process if this ever shares
            // a runtime with latency-sensitive work.
            let mut child = match Command::new("docker")
                .arg("compose")
                .arg("-f")
                .arg(&logger_config.compose_path)
                .arg("logs")
                .arg("--since")
                .arg(&since_time) // Logs emitted since the previous collection
                .stdout(Stdio::piped())
                .stderr(Stdio::piped())
                .spawn()
            {
                Ok(child) => child,
                Err(e) => {
                    eprintln!("Failed to start docker-compose logs command: {}", e);
                    continue;
                }
            };

            // Only advance the window once the command actually started, so a
            // failed spawn retries the same window next cycle.
            last_collected = now;

            if let Some(mut stdout) = child.stdout.take() {
                if let Err(e) = copy(&mut stdout, &mut writer) {
                    eprintln!("Failed to write stdout to log file: {}", e);
                }
            }

            if let Some(mut stderr) = child.stderr.take() {
                if let Err(e) = copy(&mut stderr, &mut writer) {
                    eprintln!("Failed to write stderr to log file: {}", e);
                }
            }

            // Reap the child (avoids zombie processes) and flush buffered output
            // so a crash between cycles doesn't lose the tail of the logs.
            let _ = child.wait();
            if let Err(e) = writer.flush() {
                eprintln!("Failed to flush log file: {}", e);
            }
        }
    });

    // Wait for both background tasks; report if either panics or is cancelled.
    let (retention_result, collection_result) = tokio::join!(retention_handle, collection_handle);

    if let Err(e) = retention_result {
        eprintln!("Log retention task failed: {}", e);
    }

    if let Err(e) = collection_result {
        eprintln!("Log collection task failed: {}", e);
    }
}
6 changes: 6 additions & 0 deletions config.example.toml
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,12 @@ key_path = "./keys.example.json"
prometheus_config = "./docker/prometheus.yml"
use_grafana = true

[logger]
log_dir = "logs"
retention_period_days = 1
log_collection_interval_secs = 30
compose_path = "./cb.docker-compose.yml"

[[modules]]
id = "DA_COMMIT"
docker_image = "test_da_commit"
Expand Down
34 changes: 24 additions & 10 deletions crates/cli/src/docker_cmd.rs
Original file line number Diff line number Diff line change
@@ -1,5 +1,9 @@
use std::process::{Command, Stdio};

use std::fs;
use chrono::{Utc, Duration as ChronoDuration};


pub fn handle_docker_start(compose_path: String, env_path: String) -> eyre::Result<()> {
println!("Starting Commit-Boost with compose file: {}", compose_path);

Expand Down Expand Up @@ -45,22 +49,32 @@ pub fn handle_docker_stop(compose_path: String, env_path: String) -> eyre::Resul
Ok(())
}

// TODO: we shouldnt use docker logs
pub fn handle_docker_logs(compose_path: String) -> eyre::Result<()> {
pub async fn handle_docker_logs(compose_path: String, log_file_name: Option<String>, retention_period_days: Option<i64>) -> Result<()> {
let log_file_name = log_file_name.unwrap_or_else(|| "logs.txt".to_string());
let retention_period_days = retention_period_days.unwrap_or(1);

println!("Querying Commit-Boost with compose file: {}", compose_path);

// TODO: if permission denied, print warning to run as sudo
// Create or append to the log file
let file = File::create(&log_file_name)?;
let mut writer = BufWriter::new(file);

// start docker compose
Command::new("docker")
.stdout(Stdio::inherit())
.stderr(Stdio::inherit())
// Start docker compose and redirect output to log file
let mut child = Command::new("docker")
.arg("compose")
.arg("-f")
.arg(compose_path)
.arg(&compose_path)
.arg("logs")
.arg("-f")
.output()?;
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.spawn()?;

Ok(())
if let Some(mut stdout) = child.stdout.take() {
std::io::copy(&mut stdout, &mut writer)?;
}

if let Some(mut stderr) = child.stderr.take() {
std::io::copy(&mut stderr, &mut writer)?;
}
}
17 changes: 17 additions & 0 deletions crates/cli/src/docker_init.rs
Original file line number Diff line number Diff line change
Expand Up @@ -76,6 +76,22 @@ pub fn handle_docker_init(config_path: String, output_dir: String) -> eyre::Resu

services.insert("cb_pbs".to_owned(), Some(pbs_service));

let logger_envs = IndexMap::from([
get_env_same(CB_CONFIG_ENV)
]);

let logs_volume = Volumes::Simple(format!("./{}:/{}", cb_config.logger.log_dir, cb_config.logger.log_dir));

let logger_service = Service {
container_name: Some("cb_logger".to_string()),
image: Some("commitboost_logger".to_string()),
environment: Environment::KvPair(logger_envs),
volumes: vec![config_volume.clone(), logs_volume.clone()],
..Service::default()
};

services.insert("cb_logger".to_owned(), Some(logger_service));

// setup modules
if let Some(modules_config) = cb_config.modules {
for module in modules_config {
Expand Down Expand Up @@ -221,6 +237,7 @@ pub fn handle_docker_init(config_path: String, output_dir: String) -> eyre::Resu
networks: Networks::Simple(vec![METRICS_NETWORK.to_owned()]),
depends_on: DependsOnOptions::Simple(vec!["cb_prometheus".to_owned()]),
environment: Environment::List(vec!["GF_SECURITY_ADMIN_PASSWORD=admin".to_owned()]),
volumes: vec![Volumes::Simple("./grafana/dashboards:/etc/grafana/provisioning/dashboards".to_owned()), Volumes::Simple("./grafana/datasources:/etc/grafana/provisioning/datasources".to_owned())],
// TODO: re-enable logging here once we move away from docker logs
logging: Some(LoggingParameters { driver: Some("none".to_owned()), options: None }),
..Service::default()
Expand Down
30 changes: 30 additions & 0 deletions crates/common/src/config.rs
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ pub struct CommitBoostConfig {
pub modules: Option<Vec<StaticModuleConfig>>,
pub signer: Option<SignerConfig>,
pub metrics: MetricsConfig,
pub logger: LoggerConfig
}

fn load_from_file<T: DeserializeOwned>(path: &str) -> T {
Expand Down Expand Up @@ -173,6 +174,23 @@ pub struct PbsModuleConfig<T = ()> {
pub extra: T,
}

#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct LoggerConfig {
    /// Path to the docker-compose file; passed to `docker compose -f` to fetch
    /// logs for all active containers of this project
    pub compose_path: String,

    /// Directory where log files are written (created if missing)
    pub log_dir: String,

    /// How long to keep log files for, in days (defaults to 1)
    #[serde(default = "default_u64::<1>")]
    pub retention_period_days: u64,

    /// How often to collect logs, in seconds (defaults to 30)
    #[serde(default = "default_u64::<30>")]
    pub log_collection_interval_secs: u64
}

/// Serde default helper: yields the const-generic value `U`.
const fn default_u64<const U: u64>() -> u64 { U }
Expand Down Expand Up @@ -200,6 +218,18 @@ pub fn load_pbs_config() -> eyre::Result<PbsModuleConfig<()>> {
})
}

/// Loads the logger config from the main Commit-Boost config file
/// (doc previously said "default pbs config" — copy-paste error).
pub fn load_logger_config() -> eyre::Result<LoggerConfig> {
    let config = CommitBoostConfig::from_env_path();
    // `logger` is an owned field of the top-level config, so move it out
    // directly instead of rebuilding the struct field by field.
    Ok(config.logger)
}


/// Loads a custom pbs config, i.e. with signer client and/or custom data
pub fn load_pbs_custom_config<T: DeserializeOwned>() -> eyre::Result<PbsModuleConfig<T>> {
#[derive(Debug, Deserialize)]
Expand Down
1 change: 0 additions & 1 deletion crates/common/src/utils.rs
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,6 @@ use blst::min_pk::{PublicKey, Signature};
use rand::{distributions::Alphanumeric, Rng};
use reqwest::header::HeaderMap;
use tracing_subscriber::{fmt, layer::SubscriberExt, util::SubscriberInitExt, EnvFilter};

use crate::types::Chain;

const SECONDS_PER_SLOT: u64 = 12;
Expand Down
27 changes: 27 additions & 0 deletions docker/logger.Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,27 @@
FROM lukemathwalker/cargo-chef:latest-rust-1 AS chef
WORKDIR /app

FROM chef AS planner
COPY . .
RUN cargo chef prepare --recipe-path recipe.json

FROM chef AS builder
COPY --from=planner /app/recipe.json recipe.json

# Build dependencies first so they are cached independently of source changes
RUN cargo chef cook --release --recipe-path recipe.json

COPY . .
RUN cargo build --release


FROM ubuntu AS runtime
WORKDIR /app

# Combine update + install in a single layer: a cached, stale `apt-get update`
# layer can otherwise pair with a fresh install list and fail or install old
# packages. Also trim apt caches to keep the runtime image small.
RUN apt-get update && \
    apt-get install -y openssl ca-certificates libssl3 libssl-dev && \
    rm -rf /var/lib/apt/lists/*

COPY --from=builder /app/target/release/logger /usr/local/bin
ENTRYPOINT ["/usr/local/bin/logger"]



Loading