Improved log interface performance and streaming
This commit enhances the log interface by reducing lag and improving
rendering speed. Logs for the past day are now loaded first, and new log
lines are streamed in real time from the backend, so they are available
immediately. Additionally, a download button at the top of the logs dialog
downloads all logs collected since the container was started.
samuchila committed Feb 13, 2024
1 parent a21eebb commit 3302462
Showing 12 changed files with 541 additions and 255 deletions.
1 change: 1 addition & 0 deletions api/Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions api/Cargo.toml
@@ -13,6 +13,7 @@ path = "src/main.rs"

[dependencies]
async-trait = "0.1"
async-stream = "0.3"
base64 = "0.21"
boa_engine = "0.17"
bytesize = { version = "1.3", features = ["serde"] }
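The added `async-stream` dependency suggests how the infrastructure side can expose container logs as a `futures` stream. A minimal sketch, assuming a hypothetical helper and an already-collected list of log lines, of producing the `BoxStream` shape that `stream_logs` returns in `api/src/apps/mod.rs` below:

use async_stream::stream;
use chrono::{DateTime, FixedOffset};
use futures::stream::{BoxStream, StreamExt};

// Hypothetical helper, not part of the commit: wraps already-collected log
// lines into the boxed stream shape consumed by `stream_logs` further down.
fn boxed_log_stream(
    raw_lines: Vec<(DateTime<FixedOffset>, String)>,
) -> BoxStream<'static, Result<(DateTime<FixedOffset>, String), failure::Error>> {
    stream! {
        for line in raw_lines {
            // A real implementation would yield lines as the backend emits them.
            yield Ok::<_, failure::Error>(line);
        }
    }
    .boxed()
}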
12 changes: 10 additions & 2 deletions api/res/openapi.yml
@@ -246,12 +246,20 @@ paths:
example: '2019-07-22T08:42:47-00:00'
- in: query
name: limit
description: The number of log lines to retrieve. If not present, 1000 lines will be retrieved.
description: The number of log lines to retrieve. If not present, all the lines from `since` are retrieved.
schema:
type: integer
- in: query
name: asAttachment
description: >-
Determines how the response is presented by the browser. When `true`, the response content is provided as a downloadable attachment.
If `false` or not provided, the response is displayed inline.
schema:
type: boolean
responses:
'200':
description: The available log statements
description: |
The available log statements. MIME type `text/event-stream` supports streaming of logs.
headers:
Link:
schema:
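For orientation, a sketch of how a client might exercise the new query parameters. This is illustrative only: the logs route is a guess (the hunk above does not show the path), the `Accept` header as the selector for the streaming representation is an assumption, and it uses `reqwest` with its "stream" feature, which is not a dependency of this repository.

use futures::StreamExt;

// Illustrative only: URL path is hypothetical; `asAttachment` is left at its
// inline default. Requires reqwest with the "stream" feature enabled.
async fn follow_logs() -> Result<(), Box<dyn std::error::Error>> {
    let response = reqwest::Client::new()
        .get("http://localhost:8000/api/apps/master/logs/service-a") // assumed path
        .query(&[("since", "2019-07-22T08:42:47-00:00"), ("limit", "1000")])
        .header("Accept", "text/event-stream")
        .send()
        .await?;

    // Print chunks as they arrive instead of waiting for the full body.
    let mut body = response.bytes_stream();
    while let Some(chunk) = body.next().await {
        print!("{}", String::from_utf8_lossy(&chunk?));
    }
    Ok(())
}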
98 changes: 84 additions & 14 deletions api/src/apps/mod.rs
@@ -36,6 +36,8 @@ use crate::models::{AppName, AppStatusChangeId, LogChunk, ServiceConfig};
use crate::registry::Registry;
use crate::registry::RegistryError;
use chrono::{DateTime, FixedOffset};
use futures::stream::BoxStream;
use futures::StreamExt;
use handlebars::RenderError;
pub use host_meta_cache::new as host_meta_crawling;
pub use host_meta_cache::HostMetaCache;
@@ -349,22 +351,38 @@ impl AppsService {
}
}

pub async fn get_logs(
&self,
app_name: &AppName,
service_name: &str,
since: &Option<DateTime<FixedOffset>>,
limit: usize,
pub async fn stream_logs<'a>(
&'a self,
app_name: &'a AppName,
service_name: &'a str,
since: &'a Option<DateTime<FixedOffset>>,
limit: &'a Option<usize>,
) -> BoxStream<'a, Result<(DateTime<FixedOffset>, String), failure::Error>> {
self.infrastructure
.get_logs(app_name, service_name, since, limit, true)
.await
}

pub async fn get_logs<'a>(
&'a self,
app_name: &'a AppName,
service_name: &'a str,
since: &'a Option<DateTime<FixedOffset>>,
limit: &'a Option<usize>,
) -> Result<Option<LogChunk>, AppsServiceError> {
match self
let mut log_lines = Vec::new();
let mut log_stream = self
.infrastructure
.get_logs(app_name, service_name, since, limit)
.await?
{
None => Ok(None),
Some(ref logs) if logs.is_empty() => Ok(None),
Some(logs) => Ok(Some(LogChunk::from(logs))),
.get_logs(app_name, service_name, since, limit, false)
.await;

while let Some(result) = log_stream.next().await {
if let Ok(log_line) = result {
log_lines.push(log_line);
}
}

Ok(Some(LogChunk::from(log_lines)))
}

pub async fn change_status(
@@ -683,7 +701,7 @@ mod tests {
.await?;

let log_chunk = apps
.get_logs(&app_name, &String::from("service-a"), &None, 100)
.get_logs(&app_name, &String::from("service-a"), &None, &Some(100))
.await
.unwrap()
.unwrap();
@@ -709,6 +727,58 @@ Log msg 3 of service-a of app master
Ok(())
}

#[tokio::test]
async fn should_stream_logs_from_infrastructure() -> Result<(), AppsServiceError> {
let config = Config::default();
let infrastructure = Box::new(Dummy::new());
let apps = AppsService::new(config, infrastructure)?;

let app_name = AppName::from_str("master").unwrap();
let services = vec![sc!("service-a"), sc!("service-b")];
apps.create_or_update(&app_name, &AppStatusChangeId::new(), None, &services)
.await?;
for service in services {
let mut log_stream = apps
.stream_logs(&app_name, service.service_name(), &None, &None)
.await;

assert_eq!(
log_stream.next().await.unwrap().unwrap(),
(
DateTime::parse_from_rfc3339("2019-07-18T07:25:00.000000000Z").unwrap(),
format!(
"Log msg 1 of {} of app {app_name}\n",
service.service_name()
)
)
);

assert_eq!(
log_stream.next().await.unwrap().unwrap(),
(
DateTime::parse_from_rfc3339("2019-07-18T07:30:00.000000000Z").unwrap(),
format!(
"Log msg 2 of {} of app {app_name}\n",
service.service_name()
)
)
);

assert_eq!(
log_stream.next().await.unwrap().unwrap(),
(
DateTime::parse_from_rfc3339("2019-07-18T07:35:00.000000000Z").unwrap(),
format!(
"Log msg 3 of {} of app {app_name}\n",
service.service_name()
)
)
);
}

Ok(())
}

#[tokio::test]
async fn should_deploy_companions() -> Result<(), AppsServiceError> {
let config = config_from_str!(
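Taken together, the new `AppsService` API gives callers two ways to read logs. A short usage sketch using the signatures from the diff above; the surrounding setup is elided and the service name is illustrative:

use futures::StreamExt;

// Sketch of the two call paths added by this commit; error handling is
// reduced to pattern matches for brevity.
async fn print_service_logs(apps: &AppsService, app_name: &AppName) {
    // One-shot: collect up to `limit` lines into a LogChunk.
    if let Ok(Some(_chunk)) = apps
        .get_logs(app_name, "service-a", &None, &Some(1000))
        .await
    {
        // render the chunk ...
    }

    // Follow mode: consume the boxed stream line by line as entries arrive.
    let mut log_stream = apps.stream_logs(app_name, "service-a", &None, &None).await;
    while let Some(Ok((timestamp, line))) = log_stream.next().await {
        print!("{timestamp} {line}");
    }
}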
