From e591e089aedf36f75482ff449f2b8779904ccabe Mon Sep 17 00:00:00 2001 From: Louis Pahlavi Date: Wed, 19 Feb 2025 18:05:31 +0100 Subject: [PATCH 01/20] XC-286: Add logging functionality --- Cargo.lock | 14 ++ Cargo.toml | 3 +- canister/Cargo.toml | 2 + canister/sol_rpc_canister.did | 7 + canister/src/constants.rs | 3 +- canister/src/http_types/mod.rs | 106 +++++++++++++++ canister/src/http_types/tests.rs | 20 +++ canister/src/lib.rs | 2 + canister/src/lifecycle/mod.rs | 26 ++-- canister/src/logs/mod.rs | 168 +++++++++++++++++++++++ canister/src/logs/tests.rs | 223 +++++++++++++++++++++++++++++++ canister/src/main.rs | 109 +++++++++++++-- canister/src/state/mod.rs | 12 +- canister/src/types/mod.rs | 59 +++++++- canister/src/validate/mod.rs | 2 +- integration_tests/tests/tests.rs | 27 ++++ libs/client/Cargo.toml | 5 +- libs/client/src/lib.rs | 22 +++ libs/types/src/lib.rs | 8 +- libs/types/src/lifecycle/mod.rs | 21 ++- libs/types/src/regex/mod.rs | 38 ++++++ libs/types/src/rpc_client/mod.rs | 37 +---- 22 files changed, 838 insertions(+), 76 deletions(-) create mode 100644 canister/src/http_types/mod.rs create mode 100644 canister/src/http_types/tests.rs create mode 100644 canister/src/logs/mod.rs create mode 100644 canister/src/logs/tests.rs create mode 100644 libs/types/src/regex/mod.rs diff --git a/Cargo.lock b/Cargo.lock index 77f6cd1d..d3a04059 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1259,6 +1259,15 @@ dependencies = [ "tracing", ] +[[package]] +name = "ic-canister-log" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cb82c4f617ecff6e452fe65af0489626ec7330ffe3eedd9ea14e6178eea48d1a" +dependencies = [ + "serde", +] + [[package]] name = "ic-cdk" version = "0.17.1" @@ -2866,11 +2875,13 @@ dependencies = [ "ciborium", "const_format", "hex", + "ic-canister-log", "ic-cdk", "ic-stable-structures", "proptest", "regex", "serde", + "serde_bytes", "serde_json", "sol_rpc_types", "url", @@ -2885,6 +2896,9 @@ dependencies = [ "candid", "ic-cdk", "serde", + "serde_bytes", + "serde_json", + "sol_rpc_canister", "sol_rpc_types", ] diff --git a/Cargo.toml b/Cargo.toml index 3e555d38..cafd7731 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -35,6 +35,7 @@ ciborium = "0.2.2" const_format = "0.2.34" getrandom = { version = "*", default-features = false, features = ["custom"] } hex = "0.4.3" +ic-canister-log = "0.2.0" ic-cdk = "0.17.1" ic-ed25519 = "0.1.0" ic-stable-structures = "0.6.7" @@ -43,6 +44,7 @@ pocket-ic = "6.0.0" proptest = "1.6.0" regex = "1.11.1" serde = { version = "1.0.217", features = ["derive"] } +serde_bytes = "0.11.15" serde_json = "1.0.138" solana-hash = "2.2.0" solana-instruction = "2.2.0" @@ -53,7 +55,6 @@ solana-pubkey = "2.2.0" solana-signature = "2.2.0" solana-transaction = { version = "2.2.0", features = ["bincode"] } strum = { version = "0.27.0", features = ["derive"] } -thiserror = "2.0.11" tokio = "1.43.0" url = "2.5" zeroize = { version = "1.8", features = ["zeroize_derive"] } diff --git a/canister/Cargo.toml b/canister/Cargo.toml index 0dc760e0..ece94bcd 100644 --- a/canister/Cargo.toml +++ b/canister/Cargo.toml @@ -17,11 +17,13 @@ candid = { workspace = true } ciborium = { workspace = true } const_format = { workspace = true } hex = { workspace = true } +ic-canister-log = { workspace = true } ic-cdk = { workspace = true } ic-stable-structures = { workspace = true } sol_rpc_types = { path = "../libs/types" } regex = { workspace = true } serde = { workspace = true } +serde_bytes = {workspace = true} serde_json = { workspace = true 
} url = { workspace = true } zeroize = { workspace = true } diff --git a/canister/sol_rpc_canister.did b/canister/sol_rpc_canister.did index b8e03c01..17753d2d 100644 --- a/canister/sol_rpc_canister.did +++ b/canister/sol_rpc_canister.did @@ -54,9 +54,16 @@ type RegexSubstitution = record { type OverrideProvider = record { overrideUrl : opt RegexSubstitution }; +type LogFilter = variant { + ShowAll; + HideAll; + ShowPattern : Regex; + HidePattern : Regex; +}; type InstallArgs = record { manageApiKeys: opt vec principal; overrideProvider: opt OverrideProvider; + logFilter: opt LogFilter; }; service : (InstallArgs,) -> { getProviders : () -> (vec Provider) query; diff --git a/canister/src/constants.rs b/canister/src/constants.rs index 744e29ff..f8ad89c7 100644 --- a/canister/src/constants.rs +++ b/canister/src/constants.rs @@ -1,4 +1,5 @@ pub const API_KEY_REPLACE_STRING: &str = "{API_KEY}"; -pub const API_KEY_MAX_SIZE: usize = 512; +pub const API_KEY_MAX_SIZE: u32 = 512; +pub const MESSAGE_FILTER_MAX_SIZE: u32 = 1000; pub const VALID_API_KEY_CHARS: &str = "0123456789ABCDEFGHIJKLMNOPQRTSUVWXYZabcdefghijklmnopqrstuvwxyz$-_.+!*"; diff --git a/canister/src/http_types/mod.rs b/canister/src/http_types/mod.rs new file mode 100644 index 00000000..0b4247e3 --- /dev/null +++ b/canister/src/http_types/mod.rs @@ -0,0 +1,106 @@ +//! Copy of the types from the unpublished [`ic-canisters-http-types`](https://github.com/dfinity/ic/blob/f4242cbcf83f0725663f3cd1a6b3a83eb2dace01/rs/rust_canisters/http_types/src/lib.rs) crate. + +#[cfg(test)] +mod tests; + +use candid::{CandidType, Deserialize}; +use serde_bytes::ByteBuf; + +#[derive(Clone, Debug, CandidType, Deserialize)] +pub struct HttpRequest { + pub method: String, + pub url: String, + pub headers: Vec<(String, String)>, + pub body: ByteBuf, +} + +impl HttpRequest { + pub fn path(&self) -> &str { + match self.url.find('?') { + None => &self.url[..], + Some(index) => &self.url[..index], + } + } + + /// Searches for the first appearance of a parameter in the request URL. + /// Returns `None` if the given parameter does not appear in the query. 
+    pub fn raw_query_param(&self, param: &str) -> Option<&str> {
+        const QUERY_SEPARATOR: &str = "?";
+        let query_string = self.url.split(QUERY_SEPARATOR).nth(1)?;
+        if query_string.is_empty() {
+            return None;
+        }
+        const PARAMETER_SEPARATOR: &str = "&";
+        for chunk in query_string.split(PARAMETER_SEPARATOR) {
+            const KEY_VALUE_SEPARATOR: &str = "=";
+            let mut split = chunk.splitn(2, KEY_VALUE_SEPARATOR);
+            let name = split.next()?;
+            if name == param {
+                return Some(split.next().unwrap_or_default());
+            }
+        }
+        None
+    }
+}
+
+#[derive(Clone, Debug, CandidType, Deserialize)]
+pub struct HttpResponse {
+    pub status_code: u16,
+    pub headers: Vec<(String, String)>,
+    pub body: ByteBuf,
+}
+
+pub struct HttpResponseBuilder(HttpResponse);
+
+impl HttpResponseBuilder {
+    pub fn ok() -> Self {
+        Self(HttpResponse {
+            status_code: 200,
+            headers: vec![],
+            body: ByteBuf::default(),
+        })
+    }
+
+    pub fn bad_request() -> Self {
+        Self(HttpResponse {
+            status_code: 400,
+            headers: vec![],
+            body: ByteBuf::from("bad request"),
+        })
+    }
+
+    pub fn not_found() -> Self {
+        Self(HttpResponse {
+            status_code: 404,
+            headers: vec![],
+            body: ByteBuf::from("not found"),
+        })
+    }
+
+    pub fn server_error(reason: impl ToString) -> Self {
+        Self(HttpResponse {
+            status_code: 500,
+            headers: vec![],
+            body: ByteBuf::from(reason.to_string()),
+        })
+    }
+
+    pub fn header(mut self, name: impl ToString, value: impl ToString) -> Self {
+        self.0.headers.push((name.to_string(), value.to_string()));
+        self
+    }
+
+    pub fn body(mut self, bytes: impl Into<Vec<u8>>) -> Self {
+        self.0.body = ByteBuf::from(bytes.into());
+        self
+    }
+
+    pub fn with_body_and_content_length(self, bytes: impl Into<Vec<u8>>) -> Self {
+        let bytes = bytes.into();
+        self.header("Content-Length", bytes.len()).body(bytes)
+    }
+
+    pub fn build(self) -> HttpResponse {
+        self.0
+    }
+}
diff --git a/canister/src/http_types/tests.rs b/canister/src/http_types/tests.rs
new file mode 100644
index 00000000..eaac5677
--- /dev/null
+++ b/canister/src/http_types/tests.rs
@@ -0,0 +1,20 @@
+use crate::http_types::HttpRequest;
+
+#[test]
+fn test_raw_query_param() {
+    fn request_with_url(url: String) -> HttpRequest {
+        HttpRequest {
+            method: "".to_string(),
+            url,
+            headers: vec![],
+            body: Default::default(),
+        }
+    }
+    let http_request = request_with_url("/endpoint?time=1000".to_string());
+    assert_eq!(http_request.raw_query_param("time"), Some("1000"));
+    let http_request = request_with_url("/endpoint".to_string());
+    assert_eq!(http_request.raw_query_param("time"), None);
+    let http_request =
+        request_with_url("/endpoint?time=1000&time=1001&other=abcde&time=1002".to_string());
+    assert_eq!(http_request.raw_query_param("time"), Some("1000"));
+}
diff --git a/canister/src/lib.rs b/canister/src/lib.rs
index b1611b30..cf5a74a6 100644
--- a/canister/src/lib.rs
+++ b/canister/src/lib.rs
@@ -1,5 +1,7 @@
 pub mod constants;
+pub mod http_types;
 pub mod lifecycle;
+pub mod logs;
 pub mod providers;
 pub mod rpc_client;
 pub mod state;
diff --git a/canister/src/lifecycle/mod.rs b/canister/src/lifecycle/mod.rs
index 4617f509..48704e5d 100644
--- a/canister/src/lifecycle/mod.rs
+++ b/canister/src/lifecycle/mod.rs
@@ -1,29 +1,29 @@
-use crate::state::{init_state, mutate_state, State};
+use crate::{
+    logs::INFO,
+    state::{init_state, mutate_state, State},
+};
+use ic_canister_log::log;
 use sol_rpc_types::InstallArgs;
 
 pub fn init(args: InstallArgs) {
-    // TODO XC-286: Add logging
-    // log!(
-    //     INFO,
-    //     "[init]: initialized SOL RPC canister with arg: {:?}",
-    //     args
-    // );
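// The new `logFilter` install argument is the interesting part of this hunk: both `init`
// and `post_upgrade` forward it into canister state. For illustration, a minimal sketch
// (not taken from the patch) of an install argument that only records INFO and TRACE_HTTP
// lines; `InstallArgs` and `LogFilter` are the types added to `sol_rpc_types` later in
// this patch:
//
//     use sol_rpc_types::{InstallArgs, LogFilter};
//
//     let install_args = InstallArgs {
//         log_filter: Some(LogFilter::ShowPattern("^(INFO|TRACE_HTTP)".into())),
//         ..InstallArgs::default()
//     };
//     // `init(install_args)` stores the filter via `State::from`, while
//     // `post_upgrade(Some(install_args))` updates it through `set_log_filter`.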
init_state(State::from(args)); } pub fn post_upgrade(args: Option) { if let Some(args) = args { - // TODO XC-286: Add logging - // log!( - // INFO, - // "[init]: upgraded SOL RPC canister with arg: {:?}", - // args - // ); + log!( + INFO, + "[init]: upgraded SOL RPC canister with arg: {:?}", + args + ); if let Some(api_key_principals) = args.manage_api_keys { mutate_state(|s| s.set_api_key_principals(api_key_principals)); } if let Some(override_provider) = args.override_provider { mutate_state(|s| s.set_override_provider(override_provider.into())); } + if let Some(log_filter) = args.log_filter { + mutate_state(|s| s.set_log_filter(log_filter.into())); + } } } diff --git a/canister/src/logs/mod.rs b/canister/src/logs/mod.rs new file mode 100644 index 00000000..eb0f0b82 --- /dev/null +++ b/canister/src/logs/mod.rs @@ -0,0 +1,168 @@ +#[cfg(test)] +mod tests; + +use crate::state::read_state; +use candid::CandidType; +use ic_canister_log::{declare_log_buffer, export as export_logs, GlobalBuffer, Sink}; +use serde::Deserialize; +use std::str::FromStr; + +// High-priority messages. +declare_log_buffer!(name = INFO_BUF, capacity = 1000); + +// Low-priority info messages. +declare_log_buffer!(name = DEBUG_BUF, capacity = 1000); + +// Trace of HTTP requests and responses. +declare_log_buffer!(name = TRACE_HTTP_BUF, capacity = 1000); + +pub const INFO: PrintProxySink = PrintProxySink(&Priority::Info, &INFO_BUF); +pub const DEBUG: PrintProxySink = PrintProxySink(&Priority::Debug, &DEBUG_BUF); +pub const TRACE_HTTP: PrintProxySink = PrintProxySink(&Priority::TraceHttp, &TRACE_HTTP_BUF); + +#[derive(Debug)] +pub struct PrintProxySink(&'static Priority, &'static GlobalBuffer); + +impl Sink for PrintProxySink { + fn append(&self, entry: ic_canister_log::LogEntry) { + let message = format!( + "{} {}:{} {}", + self.0.as_str_uppercase(), + entry.file, + entry.line, + entry.message, + ); + if read_state(|state| state.get_log_filter()).is_match(&message) { + ic_cdk::println!("{}", message); + self.1.append(entry) + } + } +} + +#[derive(Copy, Clone, Debug, Eq, PartialEq, CandidType, Deserialize, serde::Serialize)] +pub enum Priority { + Info, + TraceHttp, + Debug, +} + +impl Priority { + pub fn as_str_uppercase(self) -> &'static str { + match self { + Priority::Info => "INFO", + Priority::TraceHttp => "TRACE_HTTP", + Priority::Debug => "DEBUG", + } + } +} + +impl FromStr for Priority { + type Err = String; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "info" => Ok(Priority::Info), + "trace_http" => Ok(Priority::TraceHttp), + "debug" => Ok(Priority::Debug), + _ => Err("could not recognize priority".to_string()), + } + } +} + +#[derive(Copy, Clone, Debug, Deserialize, serde::Serialize)] +pub enum Sort { + Ascending, + Descending, +} + +impl FromStr for Sort { + type Err = String; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "asc" => Ok(Sort::Ascending), + "desc" => Ok(Sort::Descending), + _ => Err("could not recognize sort order".to_string()), + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Deserialize, serde::Serialize)] +pub struct LogEntry { + pub timestamp: u64, + pub priority: Priority, + pub file: String, + pub line: u32, + pub message: String, + pub counter: u64, +} + +#[derive(Clone, Debug, Default, Deserialize, serde::Serialize)] +pub struct Log { + pub entries: Vec, +} + +impl Log { + pub fn push_logs(&mut self, priority: Priority) { + let logs = match priority { + Priority::Info => export_logs(&INFO_BUF), + Priority::TraceHttp => 
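// A note on the filtering contract implemented by `PrintProxySink::append` above: every
// record is first rendered as "<PRIORITY> <file>:<line> <message>", and that full string is
// what the configured `LogFilter` sees, so a pattern can select on the priority prefix as
// well as on the message body. Illustrative example (file and line are hypothetical):
//
//     log!(INFO, "[init]: upgraded SOL RPC canister with arg: {:?}", args);
//     // is matched against something like
//     //   "INFO canister/src/lifecycle/mod.rs:17 [init]: upgraded SOL RPC canister with arg: ..."
//     // and is only printed and appended to INFO_BUF when `LogFilter::is_match` returns true.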
export_logs(&TRACE_HTTP_BUF), + Priority::Debug => export_logs(&DEBUG_BUF), + }; + for entry in logs { + self.entries.push(LogEntry { + timestamp: entry.timestamp, + counter: entry.counter, + priority, + file: entry.file.to_string(), + line: entry.line, + message: entry.message, + }); + } + } + + pub fn push_all(&mut self) { + self.push_logs(Priority::Info); + self.push_logs(Priority::TraceHttp); + self.push_logs(Priority::Debug); + } + + pub fn serialize_logs(&self, max_body_size: usize) -> String { + let mut entries_json: String = serde_json::to_string(&self).unwrap_or_default(); + + if entries_json.len() > max_body_size { + let mut left = 0; + let mut right = self.entries.len(); + + while left < right { + let mid = left + (right - left) / 2; + let mut temp_log = self.clone(); + temp_log.entries.truncate(mid); + let temp_entries_json = serde_json::to_string(&temp_log).unwrap_or_default(); + + if temp_entries_json.len() <= max_body_size { + entries_json = temp_entries_json; + left = mid + 1; + } else { + right = mid; + } + } + } + entries_json + } + + pub fn sort_logs(&mut self, sort_order: Sort) { + match sort_order { + Sort::Ascending => self.sort_asc(), + Sort::Descending => self.sort_desc(), + } + } + + pub fn sort_asc(&mut self) { + self.entries.sort_by(|a, b| a.timestamp.cmp(&b.timestamp)); + } + + pub fn sort_desc(&mut self) { + self.entries.sort_by(|a, b| b.timestamp.cmp(&a.timestamp)); + } +} diff --git a/canister/src/logs/tests.rs b/canister/src/logs/tests.rs new file mode 100644 index 00000000..4aafe073 --- /dev/null +++ b/canister/src/logs/tests.rs @@ -0,0 +1,223 @@ +use super::PrintProxySink; +use crate::{ + logs::{Log, LogEntry, Priority, Sort}, + state::{init_state, mutate_state, State}, + types::LogFilter, +}; +use ic_canister_log::{declare_log_buffer, export, log}; +use proptest::{prop_assert, proptest}; + +declare_log_buffer!(name = INFO_TEST_BUF, capacity = 1000); +const INFO_TEST: PrintProxySink = PrintProxySink(&Priority::Info, &INFO_TEST_BUF); + +fn info_log_entry_with_timestamp(timestamp: u64) -> LogEntry { + LogEntry { + timestamp, + priority: Priority::Info, + file: String::default(), + line: 0, + message: String::default(), + counter: 0, + } +} + +fn is_ascending(log: &Log) -> bool { + for i in 0..log.entries.len() - 1 { + if log.entries[i].timestamp > log.entries[i + 1].timestamp { + return false; + } + } + true +} + +fn is_descending(log: &Log) -> bool { + for i in 0..log.entries.len() - 1 { + if log.entries[i].timestamp < log.entries[i + 1].timestamp { + return false; + } + } + true +} + +fn get_messages() -> Vec { + export(&INFO_TEST_BUF) + .into_iter() + .map(|entry| entry.message) + .collect() +} + +proptest! 
{ + #[test] + fn logs_always_fit_in_message( + number_of_entries in (1..100_usize), + entry_size in (1..10000_usize), + max_body_size in (100..10000_usize) + ) { + let mut entries: Vec = vec![]; + for _ in 0..number_of_entries { + entries.push(LogEntry { + timestamp: 0, + priority: Priority::Info, + file: String::default(), + line: 0, + message: "1".repeat(entry_size), + counter: 0, + }); + } + let log = Log { entries }; + let truncated_logs_json_len = log.serialize_logs(max_body_size).len(); + prop_assert!(truncated_logs_json_len <= max_body_size); + } +} + +#[test] +fn sorting_order() { + let mut log = Log { entries: vec![] }; + log.entries.push(info_log_entry_with_timestamp(2)); + log.entries.push(info_log_entry_with_timestamp(0)); + log.entries.push(info_log_entry_with_timestamp(1)); + log.sort_asc(); + assert!(is_ascending(&log)); + + log.sort_desc(); + assert!(is_descending(&log)); + + log.sort_logs(Sort::Ascending); + assert!(is_ascending(&log)); + + log.sort_logs(Sort::Descending); + assert!(is_descending(&log)); +} + +#[test] +fn simple_logs_truncation() { + let mut entries: Vec = vec![]; + const MAX_BODY_SIZE: usize = 3_000_000; + + for _ in 0..10 { + entries.push(LogEntry { + timestamp: 0, + priority: Priority::Info, + file: String::default(), + line: 0, + message: String::default(), + counter: 0, + }); + } + let log = Log { + entries: entries.clone(), + }; + let small_len = serde_json::to_string(&log).unwrap_or_default().len(); + + entries.push(LogEntry { + timestamp: 0, + priority: Priority::Info, + file: String::default(), + line: 0, + message: "1".repeat(MAX_BODY_SIZE), + counter: 0, + }); + let log = Log { entries }; + let entries_json = serde_json::to_string(&log).unwrap_or_default(); + assert!(entries_json.len() > MAX_BODY_SIZE); + + let truncated_logs_json = log.serialize_logs(MAX_BODY_SIZE); + + assert_eq!(small_len, truncated_logs_json.len()); +} + +#[test] +fn one_entry_too_big() { + let mut entries: Vec = vec![]; + const MAX_BODY_SIZE: usize = 3_000_000; + + entries.push(LogEntry { + timestamp: 0, + priority: Priority::Info, + file: String::default(), + line: 0, + message: "1".repeat(MAX_BODY_SIZE), + counter: 0, + }); + let log = Log { entries }; + let truncated_logs_json_len = log.serialize_logs(MAX_BODY_SIZE).len(); + assert!(truncated_logs_json_len < MAX_BODY_SIZE); + assert_eq!("{\"entries\":[]}", log.serialize_logs(MAX_BODY_SIZE)); +} + +#[test] +fn should_truncate_last_entry() { + let log_entries = vec![ + info_log_entry_with_timestamp(0), + info_log_entry_with_timestamp(1), + info_log_entry_with_timestamp(2), + ]; + let log_with_2_entries = Log { + entries: { + let mut entries = log_entries.clone(); + entries.pop(); + entries + }, + }; + let log_with_3_entries = Log { + entries: log_entries, + }; + + let serialized_log_with_2_entries = log_with_2_entries.serialize_logs(usize::MAX); + let serialized_log_with_3_entries = + log_with_3_entries.serialize_logs(serialized_log_with_2_entries.len()); + + assert_eq!(serialized_log_with_3_entries, serialized_log_with_2_entries); +} + +#[test] +fn should_show_all() { + init_state(State::default()); + mutate_state(|state| state.set_log_filter(LogFilter::ShowAll)); + log!(INFO_TEST, "ABC"); + log!(INFO_TEST, "123"); + log!(INFO_TEST, "!@#"); + assert_eq!(get_messages(), vec!["ABC", "123", "!@#"]); +} + +#[test] +fn should_hide_all() { + init_state(State::default()); + mutate_state(|state| state.set_log_filter(LogFilter::HideAll)); + log!(INFO_TEST, "ABC"); + log!(INFO_TEST, "123"); + log!(INFO_TEST, "!@#"); + 
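// The `logs_always_fit_in_message` property above can be read as a concrete guarantee for
// the HTTP endpoint: `serialize_logs` keeps only a prefix of `entries`, found by binary
// search, whose JSON encoding fits the given limit, so even a single oversized entry yields
// `{"entries":[]}` rather than an oversized body. A minimal sketch (not part of the test
// suite):
//
//     let mut log = Log { entries: vec![info_log_entry_with_timestamp(0)] };
//     log.entries[0].message = "x".repeat(5_000_000);
//     assert_eq!(log.serialize_logs(3_000_000), "{\"entries\":[]}");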
assert_eq!(get_messages().len(), 0); +} + +#[test] +fn should_show_pattern() { + init_state(State::default()); + mutate_state(|state| state.set_log_filter(LogFilter::ShowPattern("end$".into()))); + log!(INFO_TEST, "message"); + log!(INFO_TEST, "message end"); + log!(INFO_TEST, "end message"); + assert_eq!(get_messages(), vec!["message end"]); +} + +#[test] +fn should_hide_pattern_including_message_type() { + init_state(State::default()); + mutate_state(|state| state.set_log_filter(LogFilter::ShowPattern("^INFO [^ ]* 123".into()))); + log!(INFO_TEST, "123"); + log!(INFO_TEST, "INFO 123"); + log!(INFO_TEST, ""); + log!(INFO_TEST, "123456"); + assert_eq!(get_messages(), vec!["123", "123456"]); +} + +#[test] +fn should_hide_pattern() { + init_state(State::default()); + mutate_state(|state| state.set_log_filter(LogFilter::HidePattern("[ABC]".into()))); + log!(INFO_TEST, "remove A"); + log!(INFO_TEST, "...B..."); + log!(INFO_TEST, "C"); + log!(INFO_TEST, "message"); + assert_eq!(get_messages(), vec!["message"]); +} diff --git a/canister/src/main.rs b/canister/src/main.rs index 5cdc49eb..3ca43d34 100644 --- a/canister/src/main.rs +++ b/canister/src/main.rs @@ -1,8 +1,12 @@ use candid::candid_method; -use ic_cdk::api::is_controller; -use ic_cdk::{query, update}; +use ic_canister_log::log; +use ic_cdk::{ + api::is_controller, + {query, update}, +}; use sol_rpc_canister::{ - lifecycle, + http_types, lifecycle, + logs::INFO, providers::{find_provider, PROVIDERS}, state::{mutate_state, read_state}, }; @@ -35,17 +39,16 @@ fn get_providers() -> Vec { /// /// Panics if the list of provider IDs includes a nonexistent or "unauthenticated" (fully public) provider. async fn update_api_keys(api_keys: Vec<(ProviderId, Option)>) { - // TODO XC-286: Add logs - // log!( - // INFO, - // "[{}] Updating API keys for providers: {}", - // ic_cdk::caller(), - // api_keys - // .iter() - // .map(|(id, _)| id.to_string()) - // .collect::>() - // .join(", ") - // ); + log!( + INFO, + "[{}] Updating API keys for providers: {}", + ic_cdk::caller(), + api_keys + .iter() + .map(|(id, _)| id.to_string()) + .collect::>() + .join(", ") + ); for (provider_id, api_key) in api_keys { let provider = find_provider(|provider| provider.provider_id == provider_id) .unwrap_or_else(|| panic!("Provider not found: {}", provider_id)); @@ -64,6 +67,84 @@ async fn update_api_keys(api_keys: Vec<(ProviderId, Option)>) { } } +#[query(hidden = true)] +fn http_request(request: http_types::HttpRequest) -> http_types::HttpResponse { + match request.path() { + "/logs" => { + use sol_rpc_canister::logs::{Log, Priority, Sort}; + use std::str::FromStr; + + let max_skip_timestamp = match request.raw_query_param("time") { + Some(arg) => match u64::from_str(arg) { + Ok(value) => value, + Err(_) => { + return http_types::HttpResponseBuilder::bad_request() + .with_body_and_content_length("failed to parse the 'time' parameter") + .build() + } + }, + None => 0, + }; + + let mut log: Log = Default::default(); + + match request.raw_query_param("priority").map(Priority::from_str) { + Some(Ok(priority)) => match priority { + Priority::Info => log.push_logs(Priority::Info), + Priority::Debug => log.push_logs(Priority::Debug), + Priority::TraceHttp => {} + }, + Some(Err(_)) => { + return http_types::HttpResponseBuilder::bad_request() + .with_body_and_content_length("failed to parse the 'priority' parameter") + .build() + } + None => { + log.push_logs(Priority::Info); + log.push_logs(Priority::Debug); + } + } + + log.entries + .retain(|entry| entry.timestamp >= 
max_skip_timestamp); + + fn ordering_from_query_params(sort: Option<&str>, max_skip_timestamp: u64) -> Sort { + match sort { + Some(ord_str) => match Sort::from_str(ord_str) { + Ok(order) => order, + Err(_) => { + if max_skip_timestamp == 0 { + Sort::Ascending + } else { + Sort::Descending + } + } + }, + None => { + if max_skip_timestamp == 0 { + Sort::Ascending + } else { + Sort::Descending + } + } + } + } + + log.sort_logs(ordering_from_query_params( + request.raw_query_param("sort"), + max_skip_timestamp, + )); + + const MAX_BODY_SIZE: usize = 2_000_000; + http_types::HttpResponseBuilder::ok() + .header("Content-Type", "application/json; charset=utf-8") + .with_body_and_content_length(log.serialize_logs(MAX_BODY_SIZE)) + .build() + } + _ => http_types::HttpResponseBuilder::not_found().build(), + } +} + #[query( guard = "require_api_key_principal_or_controller", name = "verifyApiKey", diff --git a/canister/src/state/mod.rs b/canister/src/state/mod.rs index c7d43a36..a32ef5c1 100644 --- a/canister/src/state/mod.rs +++ b/canister/src/state/mod.rs @@ -1,7 +1,7 @@ #[cfg(test)] mod tests; -use crate::types::{ApiKey, OverrideProvider}; +use crate::types::{ApiKey, LogFilter, OverrideProvider}; use candid::{Deserialize, Principal}; use ic_stable_structures::{ memory_manager::{MemoryId, MemoryManager, VirtualMemory}, @@ -84,6 +84,7 @@ pub struct State { api_keys: BTreeMap, api_key_principals: Vec, override_provider: OverrideProvider, + log_filter: LogFilter, } impl State { @@ -121,6 +122,14 @@ impl State { pub fn set_override_provider(&mut self, override_provider: OverrideProvider) { self.override_provider = override_provider } + + pub fn get_log_filter(&self) -> LogFilter { + self.log_filter.clone() + } + + pub fn set_log_filter(&mut self, filter: LogFilter) { + self.log_filter = filter; + } } impl From for State { @@ -129,6 +138,7 @@ impl From for State { api_keys: Default::default(), api_key_principals: value.manage_api_keys.unwrap_or_default(), override_provider: value.override_provider.unwrap_or_default().into(), + log_filter: value.log_filter.unwrap_or_default().into(), } } } diff --git a/canister/src/types/mod.rs b/canister/src/types/mod.rs index 617973e1..d2dfcb5c 100644 --- a/canister/src/types/mod.rs +++ b/canister/src/types/mod.rs @@ -2,13 +2,13 @@ mod tests; use crate::{ - constants::{API_KEY_MAX_SIZE, API_KEY_REPLACE_STRING}, + constants::{API_KEY_MAX_SIZE, API_KEY_REPLACE_STRING, MESSAGE_FILTER_MAX_SIZE}, rpc_client, validate::validate_api_key, }; use ic_stable_structures::{storable::Bound, Storable}; use serde::{Deserialize, Serialize}; -use sol_rpc_types::{Provider, RegexSubstitution, RpcApi}; +use sol_rpc_types::{Provider, RegexString, RegexSubstitution, RpcApi}; use std::{borrow::Cow, fmt}; use zeroize::{Zeroize, ZeroizeOnDrop}; @@ -66,7 +66,7 @@ impl Storable for ApiKey { } const BOUND: Bound = Bound::Bounded { - max_size: API_KEY_MAX_SIZE as u32, + max_size: API_KEY_MAX_SIZE, is_fixed_size: false, }; } @@ -127,3 +127,56 @@ impl Storable for OverrideProvider { const BOUND: Bound = Bound::Unbounded; } + +/// Copy of [`sol_rpc_types::LogFilter`] to keep the implementation details out of the +/// [`sol_rpc_types`] crate. 
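// For reference, the `/logs` endpoint wired up in `http_request` above understands three
// optional query parameters: `time` (only entries with `timestamp >= time` are returned;
// an unparsable value is answered with a 400), `priority` (`info` or `debug`,
// case-insensitive; omitted means both), and `sort` (`asc`/`desc`, defaulting to ascending
// when `time` is 0 and descending otherwise). An illustrative request (the timestamp value
// is hypothetical):
//
//     GET /logs?priority=info&time=1738000000000000000&sort=desc
//
// The response body is the JSON produced by `Log::serialize_logs`, i.e. `{"entries":[...]}`
// truncated to at most MAX_BODY_SIZE (2_000_000) bytes.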
+#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)] +pub enum LogFilter { + #[default] + ShowAll, + HideAll, + ShowPattern(RegexString), + HidePattern(RegexString), +} + +impl From for LogFilter { + fn from(value: sol_rpc_types::LogFilter) -> Self { + match value { + sol_rpc_types::LogFilter::ShowAll => LogFilter::ShowAll, + sol_rpc_types::LogFilter::HideAll => LogFilter::HideAll, + sol_rpc_types::LogFilter::ShowPattern(regex) => LogFilter::ShowPattern(regex), + sol_rpc_types::LogFilter::HidePattern(regex) => LogFilter::HidePattern(regex), + } + } +} + +impl LogFilter { + pub fn is_match(&self, message: &str) -> bool { + match self { + Self::ShowAll => true, + Self::HideAll => false, + Self::ShowPattern(regex) => regex + .try_is_valid(message) + .expect("Invalid regex in ShowPattern log filter"), + Self::HidePattern(regex) => !regex + .try_is_valid(message) + .expect("Invalid regex in HidePattern log filter"), + } + } +} + +impl Storable for LogFilter { + fn to_bytes(&self) -> Cow<[u8]> { + serde_json::to_vec(self) + .expect("Error while serializing `LogFilter`") + .into() + } + fn from_bytes(bytes: Cow<[u8]>) -> Self { + serde_json::from_slice(&bytes).expect("Error while deserializing `LogFilter`") + } + + const BOUND: Bound = Bound::Bounded { + max_size: MESSAGE_FILTER_MAX_SIZE, + is_fixed_size: true, + }; +} diff --git a/canister/src/validate/mod.rs b/canister/src/validate/mod.rs index c501b67e..ff3e5d12 100644 --- a/canister/src/validate/mod.rs +++ b/canister/src/validate/mod.rs @@ -10,7 +10,7 @@ const API_KEY_TOO_LONG_ERROR_MESSAGE: &str = pub fn validate_api_key(api_key: &str) -> Result<(), &'static str> { if api_key.is_empty() { Err("API key must not be an empty string") - } else if api_key.as_bytes().len() > API_KEY_MAX_SIZE { + } else if api_key.as_bytes().len() as u32 > API_KEY_MAX_SIZE { Err(API_KEY_TOO_LONG_ERROR_MESSAGE) } else if api_key .chars() diff --git a/integration_tests/tests/tests.rs b/integration_tests/tests/tests.rs index f9c204c4..5b03a8e5 100644 --- a/integration_tests/tests/tests.rs +++ b/integration_tests/tests/tests.rs @@ -33,6 +33,33 @@ mod get_provider_tests { } } +mod retrieve_logs_tests { + use super::*; + + #[tokio::test] + async fn should_retrieve_logs() { + let setup = Setup::new().await; + let client = setup.client(); + assert_eq!(client.retrieve_logs("DEBUG").await, vec![]); + assert_eq!(client.retrieve_logs("INFO").await, vec![]); + + // Generate some logs + setup + .client() + .with_caller(setup.controller()) + .update_api_keys(&[( + "alchemy-mainnet".to_string(), + Some("unauthorized-api-key".to_string()), + )]) + .await; + + assert_eq!(client.retrieve_logs("DEBUG").await, vec![]); + assert!(client.retrieve_logs("INFO").await[0] + .message + .contains("Updating API keys")); + } +} + mod update_api_key_tests { use super::*; diff --git a/libs/client/Cargo.toml b/libs/client/Cargo.toml index a37b8f62..92736bb5 100644 --- a/libs/client/Cargo.toml +++ b/libs/client/Cargo.toml @@ -15,4 +15,7 @@ async-trait = { workspace = true } candid = { workspace = true } ic-cdk = { workspace = true } serde = { workspace = true } -sol_rpc_types = { path = "../types" } \ No newline at end of file +serde_bytes = { workspace = true } +serde_json = { workspace = true } +sol_rpc_types = { path = "../types" } +sol_rpc_canister = { path = "../../canister" } \ No newline at end of file diff --git a/libs/client/src/lib.rs b/libs/client/src/lib.rs index 695da69b..275f2db6 100644 --- a/libs/client/src/lib.rs +++ b/libs/client/src/lib.rs @@ -8,6 +8,10 @@ use 
candid::utils::ArgumentEncoder; use candid::{CandidType, Principal}; use ic_cdk::api::call::RejectionCode; use serde::de::DeserializeOwned; +use sol_rpc_canister::{ + http_types::{HttpRequest, HttpResponse}, + logs::{Log, LogEntry}, +}; use sol_rpc_types::ProviderId; /// Abstract the canister runtime so that the client code can be reused: @@ -92,6 +96,24 @@ impl SolRpcClient { .await .unwrap() } + + /// Retrieve logs from the SOL RPC canister from the HTTP endpoint. + pub async fn retrieve_logs(&self, priority: &str) -> Vec { + let request = HttpRequest { + method: "".to_string(), + url: format!("/logs?priority={priority}"), + headers: vec![], + body: serde_bytes::ByteBuf::new(), + }; + let response: HttpResponse = self + .runtime + .query_call(self.sol_rpc_canister, "http_request", (request,)) + .await + .unwrap(); + serde_json::from_slice::(&response.body) + .expect("failed to parse SOL RPC canister logs") + .entries + } } #[derive(Copy, Clone, Eq, PartialEq, Debug)] diff --git a/libs/types/src/lib.rs b/libs/types/src/lib.rs index f06f2b03..1dc33537 100644 --- a/libs/types/src/lib.rs +++ b/libs/types/src/lib.rs @@ -4,10 +4,12 @@ #![forbid(missing_docs)] mod lifecycle; +mod regex; mod rpc_client; -pub use lifecycle::InstallArgs; +pub use lifecycle::{InstallArgs, LogFilter}; +pub use regex::{RegexString, RegexSubstitution}; pub use rpc_client::{ - HttpHeader, OverrideProvider, Provider, ProviderId, RegexString, RegexSubstitution, RpcAccess, - RpcApi, RpcAuth, RpcService, SolDevnetService, SolMainnetService, SolanaCluster, + HttpHeader, OverrideProvider, Provider, ProviderId, RpcAccess, RpcApi, RpcAuth, RpcService, + SolDevnetService, SolMainnetService, SolanaCluster, }; diff --git a/libs/types/src/lifecycle/mod.rs b/libs/types/src/lifecycle/mod.rs index 3249c0b3..06542d14 100644 --- a/libs/types/src/lifecycle/mod.rs +++ b/libs/types/src/lifecycle/mod.rs @@ -1,6 +1,6 @@ -use crate::OverrideProvider; +use crate::{OverrideProvider, RegexString}; use candid::{CandidType, Principal}; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; /// The installation args for the Solana RPC canister #[derive(Clone, Debug, Default, CandidType, Deserialize)] @@ -11,4 +11,21 @@ pub struct InstallArgs { /// Overrides the RPC providers' default URL and HTTP headers. #[serde(rename = "overrideProvider")] pub override_provider: Option, + /// Only log entries matching this filter will be recorded. + #[serde(rename = "logFilter")] + pub log_filter: Option, +} + +/// Only log entries matching this filter will be recorded. +#[derive(Clone, Debug, Default, PartialEq, Eq, CandidType, Serialize, Deserialize)] +pub enum LogFilter { + /// All log entries are recorded. + #[default] + ShowAll, + /// No log entries are recorded. + HideAll, + /// Only log entries matching this regular expression are recorded. + ShowPattern(RegexString), + /// Only log entries not matching this regular expression are recorded. + HidePattern(RegexString), } diff --git a/libs/types/src/regex/mod.rs b/libs/types/src/regex/mod.rs new file mode 100644 index 00000000..f5883f57 --- /dev/null +++ b/libs/types/src/regex/mod.rs @@ -0,0 +1,38 @@ +use candid::{CandidType, Deserialize}; +use regex::Regex; +use serde::Serialize; + +/// A string used as a regex pattern. 
+#[derive(Clone, Debug, PartialEq, Eq, CandidType, Serialize, Deserialize)]
+pub struct RegexString(pub String);
+
+impl From<&str> for RegexString {
+    fn from(value: &str) -> Self {
+        RegexString(value.to_string())
+    }
+}
+
+impl RegexString {
+    /// Compile the string into a regular expression.
+    ///
+    /// This is a relatively expensive operation that's currently not cached.
+    pub fn compile(&self) -> Result<Regex, regex::Error> {
+        Regex::new(&self.0)
+    }
+
+    /// Checks if the given string matches the compiled regex pattern.
+    ///
+    /// Returns `Ok(true)` if `value` matches, `Ok(false)` if not, or an error if the regex is invalid.
+    pub fn try_is_valid(&self, value: &str) -> Result<bool, regex::Error> {
+        Ok(self.compile()?.is_match(value))
+    }
+}
+
+/// A regex-based substitution with a pattern and replacement string.
+#[derive(Clone, Debug, PartialEq, Eq, CandidType, Serialize, Deserialize)]
+pub struct RegexSubstitution {
+    /// The pattern to be matched.
+    pub pattern: RegexString,
+    /// The string to replace occurrences [`pattern`] with.
+    pub replacement: String,
+}
diff --git a/libs/types/src/rpc_client/mod.rs b/libs/types/src/rpc_client/mod.rs
index 08afda4d..d0461ced 100644
--- a/libs/types/src/rpc_client/mod.rs
+++ b/libs/types/src/rpc_client/mod.rs
@@ -1,9 +1,9 @@
 #[cfg(test)]
 mod tests;
 
+use crate::regex::RegexSubstitution;
 use candid::CandidType;
 pub use ic_cdk::api::management_canister::http_request::HttpHeader;
-use regex::Regex;
 use serde::{Deserialize, Serialize};
 use std::fmt::Debug;
 use strum::VariantArray;
@@ -182,41 +182,6 @@ pub enum RpcAuth {
     },
 }
 
-/// A string used as a regex pattern.
-#[derive(Clone, Debug, PartialEq, Eq, CandidType, Serialize, Deserialize)]
-pub struct RegexString(pub String);
-
-impl From<&str> for RegexString {
-    fn from(value: &str) -> Self {
-        RegexString(value.to_string())
-    }
-}
-
-impl RegexString {
-    /// Compile the string into a regular expression.
-    ///
-    /// This is a relatively expensive operation that's currently not cached.
-    pub fn compile(&self) -> Result<Regex, regex::Error> {
-        Regex::new(&self.0)
-    }
-
-    /// Checks if the given string matches the compiled regex pattern.
-    ///
-    /// Returns `Ok(true)` if `value` matches, `Ok(false)` if not, or an error if the regex is invalid.
-    pub fn try_is_valid(&self, value: &str) -> Result<bool, regex::Error> {
-        Ok(self.compile()?.is_match(value))
-    }
-}
-
-/// A regex-based substitution with a pattern and replacement string.
-#[derive(Clone, Debug, PartialEq, Eq, CandidType, Serialize, Deserialize)]
-pub struct RegexSubstitution {
-    /// The pattern to be matched.
-    pub pattern: RegexString,
-    /// The string to replace occurences [`pattern`] with.
-    pub replacement: String,
-}
-
 /// Allows modifying an [`RpcApi`]'s request URL and HTTP headers.
/// /// Currently, the request URL is modified using the [`OverrideProvider::override_url`] regular From 0a05f3892eecc5e2f71a58a4f38c43ff34cb3b8a Mon Sep 17 00:00:00 2001 From: Louis Pahlavi Date: Thu, 20 Feb 2025 11:18:59 +0100 Subject: [PATCH 02/20] XC-286: Move logging functionality to separate crate --- Cargo.lock | 37 ++-- Cargo.toml | 4 +- canister/Cargo.toml | 4 +- canister/src/logs/mod.rs | 163 ++++-------------- canister/src/main.rs | 30 +--- canister/src/state/mod.rs | 3 +- canister/src/types/mod.rs | 93 +--------- canister/src/types/tests.rs | 38 +--- libs/client/Cargo.toml | 1 + libs/client/src/lib.rs | 7 +- libs/logs/CHANGELOG.md | 8 + libs/logs/Cargo.toml | 22 +++ libs/logs/LICENSE | 1 + libs/logs/README.md | 2 + libs/logs/src/lib.rs | 140 +++++++++++++++ {canister/src/logs => libs/logs/src}/tests.rs | 91 ++++++---- libs/logs/src/types/mod.rs | 67 +++++++ libs/types/Cargo.toml | 1 + libs/types/src/lib.rs | 2 +- libs/types/src/lifecycle/mod.rs | 19 +- 20 files changed, 387 insertions(+), 346 deletions(-) create mode 100644 libs/logs/CHANGELOG.md create mode 100644 libs/logs/Cargo.toml create mode 120000 libs/logs/LICENSE create mode 100644 libs/logs/README.md create mode 100644 libs/logs/src/lib.rs rename {canister/src/logs => libs/logs/src}/tests.rs (71%) create mode 100644 libs/logs/src/types/mod.rs diff --git a/Cargo.lock b/Cargo.lock index d3a04059..8e2e585d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -40,9 +40,9 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.95" +version = "1.0.96" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "34ac096ce696dc2fcabef30516bb13c0a68a11d30131d3df6f04711467681b04" +checksum = "6b964d184e89d9b6b67dd2715bc8e74cf3107fb2b529990c90cf517326150bf4" [[package]] name = "arbitrary" @@ -2648,9 +2648,9 @@ dependencies = [ [[package]] name = "serde" -version = "1.0.217" +version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "02fc4265df13d6fa1d00ecff087228cc0a2b5f3c0e87e258d8b94a156e984c70" +checksum = "e8dfc9d19bdbf6d17e22319da49161d5d0108e4188e8b680aef6299eed22df60" dependencies = [ "serde_derive", ] @@ -2685,9 +2685,9 @@ dependencies = [ [[package]] name = "serde_derive" -version = "1.0.217" +version = "1.0.218" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a9bf7cf98d04a2b28aead066b7496853d4779c9cc183c440dbac457641e19a0" +checksum = "f09503e191f4e797cb8aac08e9a4a4695c5edf6a2e70e376d961ddd5c969f82b" dependencies = [ "proc-macro2", "quote", @@ -2707,9 +2707,9 @@ dependencies = [ [[package]] name = "serde_json" -version = "1.0.138" +version = "1.0.139" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d434192e7da787e94a6ea7e9670b26a036d0ca41e0b7efb2676dd32bae872949" +checksum = "44f86c3acccc9c65b153fe1b85a3be07fe5515274ec9f0653b4a0875731c72a6" dependencies = [ "itoa", "memchr", @@ -2883,7 +2883,9 @@ dependencies = [ "serde", "serde_bytes", "serde_json", + "sol_rpc_logs", "sol_rpc_types", + "strum 0.27.1", "url", "zeroize", ] @@ -2899,6 +2901,7 @@ dependencies = [ "serde_bytes", "serde_json", "sol_rpc_canister", + "sol_rpc_logs", "sol_rpc_types", ] @@ -2917,6 +2920,19 @@ dependencies = [ "tokio", ] +[[package]] +name = "sol_rpc_logs" +version = "0.1.0" +dependencies = [ + "candid", + "ic-canister-log", + "ic-cdk", + "proptest", + "regex", + "serde", + "serde_json", +] + [[package]] name = "sol_rpc_types" version = "0.1.0" @@ -2925,6 +2941,7 @@ dependencies = [ "ic-cdk", "regex", "serde", + "sol_rpc_logs", 
"strum 0.27.1", "url", ] @@ -4601,9 +4618,9 @@ checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec" [[package]] name = "winnow" -version = "0.7.2" +version = "0.7.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59690dea168f2198d1a3b0cac23b8063efcd11012f10ae4698f284808c8ef603" +checksum = "0e7f4ea97f6f78012141bcdb6a216b2609f0979ada50b20ca5b52dde2eac2bb1" dependencies = [ "memchr", ] diff --git a/Cargo.toml b/Cargo.toml index cafd7731..167cea74 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,9 +3,9 @@ resolver = "2" members = [ "canister", "integration_tests", "libs/client", + "libs/logs", "libs/types", - "examples/basic_solana" -] + "examples/basic_solana"] [workspace.package] authors = ["DFINITY Stiftung"] diff --git a/canister/Cargo.toml b/canister/Cargo.toml index ece94bcd..9743ee21 100644 --- a/canister/Cargo.toml +++ b/canister/Cargo.toml @@ -21,10 +21,12 @@ ic-canister-log = { workspace = true } ic-cdk = { workspace = true } ic-stable-structures = { workspace = true } sol_rpc_types = { path = "../libs/types" } +sol_rpc_logs = { path = "../libs/logs" } regex = { workspace = true } serde = { workspace = true } -serde_bytes = {workspace = true} +serde_bytes = { workspace = true } serde_json = { workspace = true } +strum = { workspace = true } url = { workspace = true } zeroize = { workspace = true } diff --git a/canister/src/logs/mod.rs b/canister/src/logs/mod.rs index eb0f0b82..afe03cb3 100644 --- a/canister/src/logs/mod.rs +++ b/canister/src/logs/mod.rs @@ -1,11 +1,10 @@ -#[cfg(test)] -mod tests; - use crate::state::read_state; use candid::CandidType; -use ic_canister_log::{declare_log_buffer, export as export_logs, GlobalBuffer, Sink}; +use ic_canister_log::{declare_log_buffer, GlobalBuffer}; use serde::Deserialize; +use sol_rpc_logs::{LogFilter, LogPriority, PrintProxySink}; use std::str::FromStr; +use strum::VariantArray; // High-priority messages. declare_log_buffer!(name = INFO_BUF, capacity = 1000); @@ -16,44 +15,44 @@ declare_log_buffer!(name = DEBUG_BUF, capacity = 1000); // Trace of HTTP requests and responses. 
declare_log_buffer!(name = TRACE_HTTP_BUF, capacity = 1000); -pub const INFO: PrintProxySink = PrintProxySink(&Priority::Info, &INFO_BUF); -pub const DEBUG: PrintProxySink = PrintProxySink(&Priority::Debug, &DEBUG_BUF); -pub const TRACE_HTTP: PrintProxySink = PrintProxySink(&Priority::TraceHttp, &TRACE_HTTP_BUF); - -#[derive(Debug)] -pub struct PrintProxySink(&'static Priority, &'static GlobalBuffer); +pub const INFO: PrintProxySink = PrintProxySink(&Priority::Info, &INFO_BUF); +pub const DEBUG: PrintProxySink = PrintProxySink(&Priority::Debug, &DEBUG_BUF); +pub const TRACE_HTTP: PrintProxySink = + PrintProxySink(&Priority::TraceHttp, &TRACE_HTTP_BUF); -impl Sink for PrintProxySink { - fn append(&self, entry: ic_canister_log::LogEntry) { - let message = format!( - "{} {}:{} {}", - self.0.as_str_uppercase(), - entry.file, - entry.line, - entry.message, - ); - if read_state(|state| state.get_log_filter()).is_match(&message) { - ic_cdk::println!("{}", message); - self.1.append(entry) - } - } -} - -#[derive(Copy, Clone, Debug, Eq, PartialEq, CandidType, Deserialize, serde::Serialize)] +#[derive( + Copy, Clone, Debug, Eq, PartialEq, CandidType, Deserialize, serde::Serialize, VariantArray, +)] pub enum Priority { Info, - TraceHttp, Debug, + TraceHttp, } -impl Priority { - pub fn as_str_uppercase(self) -> &'static str { +impl LogPriority for Priority { + fn get_buffer(&self) -> &'static GlobalBuffer { + match self { + Self::Info => &INFO_BUF, + Self::Debug => &DEBUG_BUF, + Self::TraceHttp => &TRACE_HTTP_BUF, + } + } + + fn as_str_uppercase(&self) -> &'static str { match self { - Priority::Info => "INFO", - Priority::TraceHttp => "TRACE_HTTP", - Priority::Debug => "DEBUG", + Self::Info => "INFO", + Self::TraceHttp => "TRACE_HTTP", + Self::Debug => "DEBUG", } } + + fn get_priorities() -> &'static [Priority] { + &Self::VARIANTS + } + + fn get_log_filter() -> LogFilter { + read_state(|state| state.get_log_filter()) + } } impl FromStr for Priority { @@ -68,101 +67,3 @@ impl FromStr for Priority { } } } - -#[derive(Copy, Clone, Debug, Deserialize, serde::Serialize)] -pub enum Sort { - Ascending, - Descending, -} - -impl FromStr for Sort { - type Err = String; - - fn from_str(s: &str) -> Result { - match s.to_lowercase().as_str() { - "asc" => Ok(Sort::Ascending), - "desc" => Ok(Sort::Descending), - _ => Err("could not recognize sort order".to_string()), - } - } -} - -#[derive(Clone, Debug, Eq, PartialEq, Deserialize, serde::Serialize)] -pub struct LogEntry { - pub timestamp: u64, - pub priority: Priority, - pub file: String, - pub line: u32, - pub message: String, - pub counter: u64, -} - -#[derive(Clone, Debug, Default, Deserialize, serde::Serialize)] -pub struct Log { - pub entries: Vec, -} - -impl Log { - pub fn push_logs(&mut self, priority: Priority) { - let logs = match priority { - Priority::Info => export_logs(&INFO_BUF), - Priority::TraceHttp => export_logs(&TRACE_HTTP_BUF), - Priority::Debug => export_logs(&DEBUG_BUF), - }; - for entry in logs { - self.entries.push(LogEntry { - timestamp: entry.timestamp, - counter: entry.counter, - priority, - file: entry.file.to_string(), - line: entry.line, - message: entry.message, - }); - } - } - - pub fn push_all(&mut self) { - self.push_logs(Priority::Info); - self.push_logs(Priority::TraceHttp); - self.push_logs(Priority::Debug); - } - - pub fn serialize_logs(&self, max_body_size: usize) -> String { - let mut entries_json: String = serde_json::to_string(&self).unwrap_or_default(); - - if entries_json.len() > max_body_size { - let mut left = 0; - let 
mut right = self.entries.len(); - - while left < right { - let mid = left + (right - left) / 2; - let mut temp_log = self.clone(); - temp_log.entries.truncate(mid); - let temp_entries_json = serde_json::to_string(&temp_log).unwrap_or_default(); - - if temp_entries_json.len() <= max_body_size { - entries_json = temp_entries_json; - left = mid + 1; - } else { - right = mid; - } - } - } - entries_json - } - - pub fn sort_logs(&mut self, sort_order: Sort) { - match sort_order { - Sort::Ascending => self.sort_asc(), - Sort::Descending => self.sort_desc(), - } - } - - pub fn sort_asc(&mut self) { - self.entries.sort_by(|a, b| a.timestamp.cmp(&b.timestamp)); - } - - pub fn sort_desc(&mut self) { - self.entries.sort_by(|a, b| b.timestamp.cmp(&a.timestamp)); - } -} diff --git a/canister/src/main.rs b/canister/src/main.rs index 3ca43d34..e29fa430 100644 --- a/canister/src/main.rs +++ b/canister/src/main.rs @@ -6,11 +6,14 @@ use ic_cdk::{ }; use sol_rpc_canister::{ http_types, lifecycle, + logs::Priority, logs::INFO, providers::{find_provider, PROVIDERS}, state::{mutate_state, read_state}, }; +use sol_rpc_logs::{Log, Sort}; use sol_rpc_types::{ProviderId, RpcAccess}; +use std::str::FromStr; pub fn require_api_key_principal_or_controller() -> Result<(), String> { let caller = ic_cdk::caller(); @@ -71,9 +74,6 @@ async fn update_api_keys(api_keys: Vec<(ProviderId, Option)>) { fn http_request(request: http_types::HttpRequest) -> http_types::HttpResponse { match request.path() { "/logs" => { - use sol_rpc_canister::logs::{Log, Priority, Sort}; - use std::str::FromStr; - let max_skip_timestamp = match request.raw_query_param("time") { Some(arg) => match u64::from_str(arg) { Ok(value) => value, @@ -86,7 +86,7 @@ fn http_request(request: http_types::HttpRequest) -> http_types::HttpResponse { None => 0, }; - let mut log: Log = Default::default(); + let mut log: Log = Default::default(); match request.raw_query_param("priority").map(Priority::from_str) { Some(Ok(priority)) => match priority { @@ -94,12 +94,7 @@ fn http_request(request: http_types::HttpRequest) -> http_types::HttpResponse { Priority::Debug => log.push_logs(Priority::Debug), Priority::TraceHttp => {} }, - Some(Err(_)) => { - return http_types::HttpResponseBuilder::bad_request() - .with_body_and_content_length("failed to parse the 'priority' parameter") - .build() - } - None => { + Some(Err(_)) | None => { log.push_logs(Priority::Info); log.push_logs(Priority::Debug); } @@ -109,18 +104,9 @@ fn http_request(request: http_types::HttpRequest) -> http_types::HttpResponse { .retain(|entry| entry.timestamp >= max_skip_timestamp); fn ordering_from_query_params(sort: Option<&str>, max_skip_timestamp: u64) -> Sort { - match sort { - Some(ord_str) => match Sort::from_str(ord_str) { - Ok(order) => order, - Err(_) => { - if max_skip_timestamp == 0 { - Sort::Ascending - } else { - Sort::Descending - } - } - }, - None => { + match sort.map(Sort::from_str) { + Some(Ok(order)) => order, + Some(Err(_)) | None => { if max_skip_timestamp == 0 { Sort::Ascending } else { diff --git a/canister/src/state/mod.rs b/canister/src/state/mod.rs index a32ef5c1..b8ceab3b 100644 --- a/canister/src/state/mod.rs +++ b/canister/src/state/mod.rs @@ -1,7 +1,7 @@ #[cfg(test)] mod tests; -use crate::types::{ApiKey, LogFilter, OverrideProvider}; +use crate::types::{ApiKey, OverrideProvider}; use candid::{Deserialize, Principal}; use ic_stable_structures::{ memory_manager::{MemoryId, MemoryManager, VirtualMemory}, @@ -9,6 +9,7 @@ use ic_stable_structures::{ Cell, DefaultMemoryImpl, 
Storable, }; use serde::Serialize; +use sol_rpc_logs::LogFilter; use sol_rpc_types::{InstallArgs, ProviderId}; use std::{borrow::Cow, cell::RefCell, collections::BTreeMap}; diff --git a/canister/src/types/mod.rs b/canister/src/types/mod.rs index d2dfcb5c..81c59c2c 100644 --- a/canister/src/types/mod.rs +++ b/canister/src/types/mod.rs @@ -1,15 +1,10 @@ #[cfg(test)] mod tests; -use crate::{ - constants::{API_KEY_MAX_SIZE, API_KEY_REPLACE_STRING, MESSAGE_FILTER_MAX_SIZE}, - rpc_client, - validate::validate_api_key, -}; -use ic_stable_structures::{storable::Bound, Storable}; +use crate::{constants::API_KEY_REPLACE_STRING, rpc_client, validate::validate_api_key}; use serde::{Deserialize, Serialize}; -use sol_rpc_types::{Provider, RegexString, RegexSubstitution, RpcApi}; -use std::{borrow::Cow, fmt}; +use sol_rpc_types::{Provider, RegexSubstitution, RpcApi}; +use std::fmt; use zeroize::{Zeroize, ZeroizeOnDrop}; pub enum ResolvedRpcService { @@ -56,21 +51,6 @@ impl TryFrom for ApiKey { } } -impl Storable for ApiKey { - fn to_bytes(&self) -> Cow<[u8]> { - self.0.to_bytes() - } - - fn from_bytes(bytes: Cow<[u8]>) -> Self { - Self(String::from_bytes(bytes)) - } - - const BOUND: Bound = Bound::Bounded { - max_size: API_KEY_MAX_SIZE, - is_fixed_size: false, - }; -} - /// Copy of [`sol_rpc_types::OverrideProvider`] to keep the implementation details out of the /// [`sol_rpc_types`] crate. #[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] @@ -113,70 +93,3 @@ impl OverrideProvider { } } } - -impl Storable for OverrideProvider { - fn to_bytes(&self) -> Cow<[u8]> { - serde_json::to_vec(self) - .expect("Error while serializing `OverrideProvider`") - .into() - } - - fn from_bytes(bytes: Cow<[u8]>) -> Self { - serde_json::from_slice(&bytes).expect("Error while deserializing `Storable`") - } - - const BOUND: Bound = Bound::Unbounded; -} - -/// Copy of [`sol_rpc_types::LogFilter`] to keep the implementation details out of the -/// [`sol_rpc_types`] crate. 
-#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize, Default)] -pub enum LogFilter { - #[default] - ShowAll, - HideAll, - ShowPattern(RegexString), - HidePattern(RegexString), -} - -impl From for LogFilter { - fn from(value: sol_rpc_types::LogFilter) -> Self { - match value { - sol_rpc_types::LogFilter::ShowAll => LogFilter::ShowAll, - sol_rpc_types::LogFilter::HideAll => LogFilter::HideAll, - sol_rpc_types::LogFilter::ShowPattern(regex) => LogFilter::ShowPattern(regex), - sol_rpc_types::LogFilter::HidePattern(regex) => LogFilter::HidePattern(regex), - } - } -} - -impl LogFilter { - pub fn is_match(&self, message: &str) -> bool { - match self { - Self::ShowAll => true, - Self::HideAll => false, - Self::ShowPattern(regex) => regex - .try_is_valid(message) - .expect("Invalid regex in ShowPattern log filter"), - Self::HidePattern(regex) => !regex - .try_is_valid(message) - .expect("Invalid regex in HidePattern log filter"), - } - } -} - -impl Storable for LogFilter { - fn to_bytes(&self) -> Cow<[u8]> { - serde_json::to_vec(self) - .expect("Error while serializing `LogFilter`") - .into() - } - fn from_bytes(bytes: Cow<[u8]>) -> Self { - serde_json::from_slice(&bytes).expect("Error while deserializing `LogFilter`") - } - - const BOUND: Bound = Bound::Bounded { - max_size: MESSAGE_FILTER_MAX_SIZE, - is_fixed_size: true, - }; -} diff --git a/canister/src/types/tests.rs b/canister/src/types/tests.rs index a95281c8..2ec089df 100644 --- a/canister/src/types/tests.rs +++ b/canister/src/types/tests.rs @@ -4,47 +4,11 @@ use crate::{ state::{init_state, State}, types::{ApiKey, OverrideProvider}, }; -use ic_stable_structures::Storable; use proptest::{ - option, prelude::{prop, Strategy}, proptest, }; -use sol_rpc_types::{HttpHeader, Provider, RegexString, RegexSubstitution, RpcApi}; -use std::fmt::Debug; - -mod encode_decode_tests { - use super::*; - - proptest! 
{ - #[test] - fn should_encode_decode_override_provider(value in arb_override_provider()) { - test_encoding_decoding_roundtrip(&value); - } - } - - fn test_encoding_decoding_roundtrip(value: &T) { - let bytes = value.to_bytes(); - let decoded_value = T::from_bytes(bytes); - assert_eq!(value, &decoded_value); - } - - fn arb_regex() -> impl Strategy { - ".*".prop_map(|r| RegexString::from(r.as_str())) - } - - fn arb_regex_substitution() -> impl Strategy { - (arb_regex(), ".*").prop_map(|(pattern, replacement)| RegexSubstitution { - pattern, - replacement, - }) - } - - fn arb_override_provider() -> impl Strategy { - option::of(arb_regex_substitution()) - .prop_map(|override_url| OverrideProvider { override_url }) - } -} +use sol_rpc_types::{HttpHeader, Provider, RegexSubstitution, RpcApi}; mod override_provider_tests { use super::*; diff --git a/libs/client/Cargo.toml b/libs/client/Cargo.toml index 92736bb5..8a9042ee 100644 --- a/libs/client/Cargo.toml +++ b/libs/client/Cargo.toml @@ -17,5 +17,6 @@ ic-cdk = { workspace = true } serde = { workspace = true } serde_bytes = { workspace = true } serde_json = { workspace = true } +sol_rpc_logs = { path = "../logs" } sol_rpc_types = { path = "../types" } sol_rpc_canister = { path = "../../canister" } \ No newline at end of file diff --git a/libs/client/src/lib.rs b/libs/client/src/lib.rs index 275f2db6..7d7e031d 100644 --- a/libs/client/src/lib.rs +++ b/libs/client/src/lib.rs @@ -10,8 +10,9 @@ use ic_cdk::api::call::RejectionCode; use serde::de::DeserializeOwned; use sol_rpc_canister::{ http_types::{HttpRequest, HttpResponse}, - logs::{Log, LogEntry}, + logs::Priority, }; +use sol_rpc_logs::{Log, LogEntry}; use sol_rpc_types::ProviderId; /// Abstract the canister runtime so that the client code can be reused: @@ -98,7 +99,7 @@ impl SolRpcClient { } /// Retrieve logs from the SOL RPC canister from the HTTP endpoint. - pub async fn retrieve_logs(&self, priority: &str) -> Vec { + pub async fn retrieve_logs(&self, priority: &str) -> Vec> { let request = HttpRequest { method: "".to_string(), url: format!("/logs?priority={priority}"), @@ -110,7 +111,7 @@ impl SolRpcClient { .query_call(self.sol_rpc_canister, "http_request", (request,)) .await .unwrap(); - serde_json::from_slice::(&response.body) + serde_json::from_slice::>(&response.body) .expect("failed to parse SOL RPC canister logs") .entries } diff --git a/libs/logs/CHANGELOG.md b/libs/logs/CHANGELOG.md new file mode 100644 index 00000000..5fda63a8 --- /dev/null +++ b/libs/logs/CHANGELOG.md @@ -0,0 +1,8 @@ +Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [Unreleased] \ No newline at end of file diff --git a/libs/logs/Cargo.toml b/libs/logs/Cargo.toml new file mode 100644 index 00000000..47ca99cd --- /dev/null +++ b/libs/logs/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "sol_rpc_logs" +version = "0.1.0" +description = "Crate for managing canister logs" +authors.workspace = true +edition.workspace = true +repository.workspace = true +homepage.workspace = true +license.workspace = true +readme = "README.md" +include = ["src", "Cargo.toml", "CHANGELOG.md", "LICENSE", "README.md"] + +[dependencies] +candid = { workspace = true } +ic-canister-log = { workspace = true } +ic-cdk = { workspace = true } +regex = { workspace = true } +serde = { workspace = true } +serde_json = { workspace = true } + +[dev-dependencies] +proptest = { workspace = true } diff --git a/libs/logs/LICENSE b/libs/logs/LICENSE new file mode 120000 index 00000000..30cff740 --- /dev/null +++ b/libs/logs/LICENSE @@ -0,0 +1 @@ +../../LICENSE \ No newline at end of file diff --git a/libs/logs/README.md b/libs/logs/README.md new file mode 100644 index 00000000..12204801 --- /dev/null +++ b/libs/logs/README.md @@ -0,0 +1,2 @@ +# Crate `sol_rpc_logs` + diff --git a/libs/logs/src/lib.rs b/libs/logs/src/lib.rs new file mode 100644 index 00000000..fb9f4cad --- /dev/null +++ b/libs/logs/src/lib.rs @@ -0,0 +1,140 @@ +#[cfg(test)] +mod tests; + +mod types; + +pub use crate::types::LogFilter; +use ic_canister_log::{export as export_logs, GlobalBuffer, Sink}; +use serde::Deserialize; +use std::str::FromStr; + +pub trait LogPriority { + fn get_buffer(&self) -> &'static GlobalBuffer; + fn as_str_uppercase(&self) -> &'static str; + fn get_priorities() -> &'static [Self] + where + Self: Sized; + fn get_log_filter() -> LogFilter; +} + +#[derive(Debug)] +pub struct PrintProxySink(pub &'static Priority, pub &'static GlobalBuffer); + +impl Sink for PrintProxySink { + fn append(&self, entry: ic_canister_log::LogEntry) { + let message = format!( + "{} {}:{} {}", + self.0.as_str_uppercase(), + entry.file, + entry.line, + entry.message, + ); + if Priority::get_log_filter().is_match(&message) { + ic_cdk::println!("{}", message); + self.1.append(entry) + } + } +} + +#[derive(Copy, Clone, Debug, Deserialize, serde::Serialize)] +pub enum Sort { + Ascending, + Descending, +} + +impl FromStr for Sort { + type Err = String; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "asc" => Ok(Sort::Ascending), + "desc" => Ok(Sort::Descending), + _ => Err("could not recognize sort order".to_string()), + } + } +} + +#[derive(Clone, Debug, Eq, PartialEq, Deserialize, serde::Serialize)] +pub struct LogEntry { + pub timestamp: u64, + pub priority: Priority, + pub file: String, + pub line: u32, + pub message: String, + pub counter: u64, +} + +#[derive(Clone, Debug, Deserialize, serde::Serialize)] +pub struct Log { + pub entries: Vec>, +} + +impl Default for Log { + fn default() -> Self { + Self { entries: vec![] } + } +} + +impl<'de, Priority> Log +where + Priority: LogPriority + Clone + Copy + Deserialize<'de> + serde::Serialize + 'static, +{ + pub fn push_logs(&mut self, priority: Priority) { + let logs = export_logs(priority.get_buffer()); + for entry in logs { + self.entries.push(LogEntry { + timestamp: entry.timestamp, + counter: entry.counter, + priority: priority.clone(), + file: entry.file.to_string(), + line: entry.line, + message: entry.message, + }); + } + } + + pub fn push_all(&mut self) { + Priority::get_priorities() + .iter() + .for_each(|priority| self.push_logs(*priority)); + 
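// A consumer of this crate supplies the `Priority` type. A minimal sketch of the expected
// wiring (names are illustrative; the canister's own `Priority` in `canister/src/logs` and
// the `TestPriority` in this crate's tests follow the same shape):
//
//     declare_log_buffer!(name = INFO_BUF, capacity = 1000);
//     pub const INFO: PrintProxySink<MyPriority> = PrintProxySink(&MyPriority::Info, &INFO_BUF);
//
//     #[derive(Copy, Clone, serde::Serialize, serde::Deserialize)]
//     pub enum MyPriority { Info }
//
//     impl LogPriority for MyPriority {
//         fn get_buffer(&self) -> &'static GlobalBuffer { &INFO_BUF }
//         fn as_str_uppercase(&self) -> &'static str { "INFO" }
//         fn get_priorities() -> &'static [Self] { &[MyPriority::Info] }
//         fn get_log_filter() -> LogFilter { LogFilter::ShowAll }
//     }
//
//     // `log!(INFO, "...")` then formats, filters and buffers entries as in `append` above.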
} + + pub fn serialize_logs(&self, max_body_size: usize) -> String { + let mut entries_json: String = serde_json::to_string(&self).unwrap_or_default(); + + if entries_json.len() > max_body_size { + let mut left = 0; + let mut right = self.entries.len(); + + while left < right { + let mid = left + (right - left) / 2; + let mut temp_log = self.clone(); + temp_log.entries.truncate(mid); + let temp_entries_json = serde_json::to_string(&temp_log).unwrap_or_default(); + + if temp_entries_json.len() <= max_body_size { + entries_json = temp_entries_json; + left = mid + 1; + } else { + right = mid; + } + } + } + entries_json + } + + pub fn sort_logs(&mut self, sort_order: Sort) { + match sort_order { + Sort::Ascending => self.sort_asc(), + Sort::Descending => self.sort_desc(), + } + } + + pub fn sort_asc(&mut self) { + self.entries.sort_by(|a, b| a.timestamp.cmp(&b.timestamp)); + } + + pub fn sort_desc(&mut self) { + self.entries.sort_by(|a, b| b.timestamp.cmp(&a.timestamp)); + } +} diff --git a/canister/src/logs/tests.rs b/libs/logs/src/tests.rs similarity index 71% rename from canister/src/logs/tests.rs rename to libs/logs/src/tests.rs index 4aafe073..1cb0baf5 100644 --- a/canister/src/logs/tests.rs +++ b/libs/logs/src/tests.rs @@ -1,19 +1,51 @@ -use super::PrintProxySink; -use crate::{ - logs::{Log, LogEntry, Priority, Sort}, - state::{init_state, mutate_state, State}, - types::LogFilter, -}; -use ic_canister_log::{declare_log_buffer, export, log}; +use super::{Log, LogEntry, LogPriority, PrintProxySink, Sort}; +use crate::types::LogFilter; +use ic_canister_log::{declare_log_buffer, export, log, GlobalBuffer}; use proptest::{prop_assert, proptest}; +use serde::{Deserialize, Serialize}; +use std::cell::RefCell; + +thread_local! { + static LOG_FILTER: RefCell = RefCell::default(); +} declare_log_buffer!(name = INFO_TEST_BUF, capacity = 1000); -const INFO_TEST: PrintProxySink = PrintProxySink(&Priority::Info, &INFO_TEST_BUF); +const INFO_TEST: PrintProxySink = PrintProxySink(&TestPriority::Info, &INFO_TEST_BUF); + +#[derive(Clone, Copy, Serialize, Deserialize)] +enum TestPriority { + Info, +} + +impl LogPriority for TestPriority { + fn get_buffer(&self) -> &'static GlobalBuffer { + &INFO_TEST_BUF + } + + fn as_str_uppercase(&self) -> &'static str { + "INFO" + } + + fn get_priorities() -> &'static [Self] + where + Self: Sized, + { + &[TestPriority::Info] + } + + fn get_log_filter() -> LogFilter { + LOG_FILTER.with(|cell| cell.borrow().clone()) + } +} + +fn set_log_filter(filter: LogFilter) { + LOG_FILTER.set(filter); +} -fn info_log_entry_with_timestamp(timestamp: u64) -> LogEntry { +fn info_log_entry_with_timestamp(timestamp: u64) -> LogEntry { LogEntry { timestamp, - priority: Priority::Info, + priority: TestPriority::Info, file: String::default(), line: 0, message: String::default(), @@ -21,7 +53,7 @@ fn info_log_entry_with_timestamp(timestamp: u64) -> LogEntry { } } -fn is_ascending(log: &Log) -> bool { +fn is_ascending(log: &Log) -> bool { for i in 0..log.entries.len() - 1 { if log.entries[i].timestamp > log.entries[i + 1].timestamp { return false; @@ -30,7 +62,7 @@ fn is_ascending(log: &Log) -> bool { true } -fn is_descending(log: &Log) -> bool { +fn is_descending(log: &Log) -> bool { for i in 0..log.entries.len() - 1 { if log.entries[i].timestamp < log.entries[i + 1].timestamp { return false; @@ -49,15 +81,15 @@ fn get_messages() -> Vec { proptest! 
{ #[test] fn logs_always_fit_in_message( - number_of_entries in (1..100_usize), - entry_size in (1..10000_usize), - max_body_size in (100..10000_usize) + number_of_entries in 1..100_usize, + entry_size in 1..10000_usize, + max_body_size in 100..10000_usize ) { - let mut entries: Vec = vec![]; + let mut entries: Vec> = vec![]; for _ in 0..number_of_entries { entries.push(LogEntry { timestamp: 0, - priority: Priority::Info, + priority: TestPriority::Info, file: String::default(), line: 0, message: "1".repeat(entry_size), @@ -91,13 +123,13 @@ fn sorting_order() { #[test] fn simple_logs_truncation() { - let mut entries: Vec = vec![]; + let mut entries: Vec> = vec![]; const MAX_BODY_SIZE: usize = 3_000_000; for _ in 0..10 { entries.push(LogEntry { timestamp: 0, - priority: Priority::Info, + priority: TestPriority::Info, file: String::default(), line: 0, message: String::default(), @@ -111,7 +143,7 @@ fn simple_logs_truncation() { entries.push(LogEntry { timestamp: 0, - priority: Priority::Info, + priority: TestPriority::Info, file: String::default(), line: 0, message: "1".repeat(MAX_BODY_SIZE), @@ -128,12 +160,12 @@ fn simple_logs_truncation() { #[test] fn one_entry_too_big() { - let mut entries: Vec = vec![]; + let mut entries: Vec> = vec![]; const MAX_BODY_SIZE: usize = 3_000_000; entries.push(LogEntry { timestamp: 0, - priority: Priority::Info, + priority: TestPriority::Info, file: String::default(), line: 0, message: "1".repeat(MAX_BODY_SIZE), @@ -172,8 +204,7 @@ fn should_truncate_last_entry() { #[test] fn should_show_all() { - init_state(State::default()); - mutate_state(|state| state.set_log_filter(LogFilter::ShowAll)); + set_log_filter(LogFilter::ShowAll); log!(INFO_TEST, "ABC"); log!(INFO_TEST, "123"); log!(INFO_TEST, "!@#"); @@ -182,8 +213,7 @@ fn should_show_all() { #[test] fn should_hide_all() { - init_state(State::default()); - mutate_state(|state| state.set_log_filter(LogFilter::HideAll)); + set_log_filter(LogFilter::HideAll); log!(INFO_TEST, "ABC"); log!(INFO_TEST, "123"); log!(INFO_TEST, "!@#"); @@ -192,8 +222,7 @@ fn should_hide_all() { #[test] fn should_show_pattern() { - init_state(State::default()); - mutate_state(|state| state.set_log_filter(LogFilter::ShowPattern("end$".into()))); + set_log_filter(LogFilter::ShowPattern("end$".into())); log!(INFO_TEST, "message"); log!(INFO_TEST, "message end"); log!(INFO_TEST, "end message"); @@ -202,8 +231,7 @@ fn should_show_pattern() { #[test] fn should_hide_pattern_including_message_type() { - init_state(State::default()); - mutate_state(|state| state.set_log_filter(LogFilter::ShowPattern("^INFO [^ ]* 123".into()))); + set_log_filter(LogFilter::ShowPattern("^INFO [^ ]* 123".into())); log!(INFO_TEST, "123"); log!(INFO_TEST, "INFO 123"); log!(INFO_TEST, ""); @@ -213,8 +241,7 @@ fn should_hide_pattern_including_message_type() { #[test] fn should_hide_pattern() { - init_state(State::default()); - mutate_state(|state| state.set_log_filter(LogFilter::HidePattern("[ABC]".into()))); + set_log_filter(LogFilter::HidePattern("[ABC]".into())); log!(INFO_TEST, "remove A"); log!(INFO_TEST, "...B..."); log!(INFO_TEST, "C"); diff --git a/libs/logs/src/types/mod.rs b/libs/logs/src/types/mod.rs new file mode 100644 index 00000000..b90b9400 --- /dev/null +++ b/libs/logs/src/types/mod.rs @@ -0,0 +1,67 @@ +use candid::CandidType; +use regex::Regex; +use serde::{Deserialize, Serialize}; + +/// A string used as a regex pattern. 
+#[derive(Clone, Debug, PartialEq, Eq, CandidType, Serialize, Deserialize)]
+pub struct RegexString(pub String);
+
+impl From<&str> for RegexString {
+    fn from(value: &str) -> Self {
+        RegexString(value.to_string())
+    }
+}
+
+impl RegexString {
+    /// Compile the string into a regular expression.
+    ///
+    /// This is a relatively expensive operation that's currently not cached.
+    pub fn compile(&self) -> Result<Regex, regex::Error> {
+        Regex::new(&self.0)
+    }
+
+    /// Checks if the given string matches the compiled regex pattern.
+    ///
+    /// Returns `Ok(true)` if `value` matches, `Ok(false)` if not, or an error if the regex is invalid.
+    pub fn try_is_valid(&self, value: &str) -> Result<bool, regex::Error> {
+        Ok(self.compile()?.is_match(value))
+    }
+}
+
+/// A regex-based substitution with a pattern and replacement string.
+#[derive(Clone, Debug, PartialEq, Eq, CandidType, Serialize, Deserialize)]
+pub struct RegexSubstitution {
+    /// The pattern to be matched.
+    pub pattern: RegexString,
+    /// The string to replace occurrences of [`pattern`] with.
+    pub replacement: String,
+}
+
+/// Only log entries matching this filter will be recorded.
+#[derive(Clone, Debug, Default, PartialEq, Eq, CandidType, Serialize, Deserialize)]
+pub enum LogFilter {
+    /// All log entries are recorded.
+    #[default]
+    ShowAll,
+    /// No log entries are recorded.
+    HideAll,
+    /// Only log entries matching this regular expression are recorded.
+    ShowPattern(RegexString),
+    /// Only log entries not matching this regular expression are recorded.
+    HidePattern(RegexString),
+}
+
+impl LogFilter {
+    pub fn is_match(&self, message: &str) -> bool {
+        match self {
+            Self::ShowAll => true,
+            Self::HideAll => false,
+            Self::ShowPattern(regex) => regex
+                .try_is_valid(message)
+                .expect("Invalid regex in ShowPattern log filter"),
+            Self::HidePattern(regex) => !regex
+                .try_is_valid(message)
+                .expect("Invalid regex in HidePattern log filter"),
+        }
+    }
+}
diff --git a/libs/types/Cargo.toml b/libs/types/Cargo.toml
index b88786ca..32baecb8 100644
--- a/libs/types/Cargo.toml
+++ b/libs/types/Cargo.toml
@@ -15,5 +15,6 @@ candid = { workspace = true }
 ic-cdk = { workspace = true }
 regex = { workspace = true }
 serde = { workspace = true }
+sol_rpc_logs = { path = "../logs" }
 strum = { workspace = true }
 url = { workspace = true }
diff --git a/libs/types/src/lib.rs b/libs/types/src/lib.rs
index 1dc33537..ea5d57e0 100644
--- a/libs/types/src/lib.rs
+++ b/libs/types/src/lib.rs
@@ -7,7 +7,7 @@ mod lifecycle;
 mod regex;
 mod rpc_client;
 
-pub use lifecycle::{InstallArgs, LogFilter};
+pub use lifecycle::InstallArgs;
 pub use regex::{RegexString, RegexSubstitution};
 pub use rpc_client::{
     HttpHeader, OverrideProvider, Provider, ProviderId, RpcAccess, RpcApi, RpcAuth, RpcService,
diff --git a/libs/types/src/lifecycle/mod.rs b/libs/types/src/lifecycle/mod.rs
index 06542d14..7433b0d6 100644
--- a/libs/types/src/lifecycle/mod.rs
+++ b/libs/types/src/lifecycle/mod.rs
@@ -1,6 +1,7 @@
-use crate::{OverrideProvider, RegexString};
+use crate::OverrideProvider;
 use candid::{CandidType, Principal};
-use serde::{Deserialize, Serialize};
+use serde::Deserialize;
+use sol_rpc_logs::LogFilter;
 
 /// The installation args for the Solana RPC canister
 #[derive(Clone, Debug, Default, CandidType, Deserialize)]
@@ -15,17 +16,3 @@ pub struct InstallArgs {
     #[serde(rename = "logFilter")]
     pub log_filter: Option<LogFilter>,
 }
-
-/// Only log entries matching this filter will be recorded.
-#[derive(Clone, Debug, Default, PartialEq, Eq, CandidType, Serialize, Deserialize)] -pub enum LogFilter { - /// All log entries are recorded. - #[default] - ShowAll, - /// No log entries are recorded. - HideAll, - /// Only log entries matching this regular expression are recorded. - ShowPattern(RegexString), - /// Only log entries not matching this regular expression are recorded. - HidePattern(RegexString), -} From 542163752170503c39c1661874dfa50ebf7e313e Mon Sep 17 00:00:00 2001 From: Louis Pahlavi Date: Thu, 20 Feb 2025 11:22:53 +0100 Subject: [PATCH 03/20] XC-286: Move regex types back to rpc_client --- libs/types/src/lib.rs | 6 ++--- libs/types/src/regex/mod.rs | 38 -------------------------------- libs/types/src/rpc_client/mod.rs | 36 +++++++++++++++++++++++++++++- 3 files changed, 37 insertions(+), 43 deletions(-) delete mode 100644 libs/types/src/regex/mod.rs diff --git a/libs/types/src/lib.rs b/libs/types/src/lib.rs index ea5d57e0..f06f2b03 100644 --- a/libs/types/src/lib.rs +++ b/libs/types/src/lib.rs @@ -4,12 +4,10 @@ #![forbid(missing_docs)] mod lifecycle; -mod regex; mod rpc_client; pub use lifecycle::InstallArgs; -pub use regex::{RegexString, RegexSubstitution}; pub use rpc_client::{ - HttpHeader, OverrideProvider, Provider, ProviderId, RpcAccess, RpcApi, RpcAuth, RpcService, - SolDevnetService, SolMainnetService, SolanaCluster, + HttpHeader, OverrideProvider, Provider, ProviderId, RegexString, RegexSubstitution, RpcAccess, + RpcApi, RpcAuth, RpcService, SolDevnetService, SolMainnetService, SolanaCluster, }; diff --git a/libs/types/src/regex/mod.rs b/libs/types/src/regex/mod.rs deleted file mode 100644 index f5883f57..00000000 --- a/libs/types/src/regex/mod.rs +++ /dev/null @@ -1,38 +0,0 @@ -use candid::{CandidType, Deserialize}; -use regex::Regex; -use serde::Serialize; - -/// A string used as a regex pattern. -#[derive(Clone, Debug, PartialEq, Eq, CandidType, Serialize, Deserialize)] -pub struct RegexString(pub String); - -impl From<&str> for RegexString { - fn from(value: &str) -> Self { - RegexString(value.to_string()) - } -} - -impl RegexString { - /// Compile the string into a regular expression. - /// - /// This is a relatively expensive operation that's currently not cached. - pub fn compile(&self) -> Result { - Regex::new(&self.0) - } - - /// Checks if the given string matches the compiled regex pattern. - /// - /// Returns `Ok(true)` if `value` matches, `Ok(false)` if not, or an error if the regex is invalid. - pub fn try_is_valid(&self, value: &str) -> Result { - Ok(self.compile()?.is_match(value)) - } -} - -/// A regex-based substitution with a pattern and replacement string. -#[derive(Clone, Debug, PartialEq, Eq, CandidType, Serialize, Deserialize)] -pub struct RegexSubstitution { - /// The pattern to be matched. - pub pattern: RegexString, - /// The string to replace occurrences [`pattern`] with. - pub replacement: String, -} diff --git a/libs/types/src/rpc_client/mod.rs b/libs/types/src/rpc_client/mod.rs index d0461ced..aae90256 100644 --- a/libs/types/src/rpc_client/mod.rs +++ b/libs/types/src/rpc_client/mod.rs @@ -1,9 +1,9 @@ #[cfg(test)] mod tests; -use crate::regex::RegexSubstitution; use candid::CandidType; pub use ic_cdk::api::management_canister::http_request::HttpHeader; +use regex::Regex; use serde::{Deserialize, Serialize}; use std::fmt::Debug; use strum::VariantArray; @@ -181,6 +181,40 @@ pub enum RpcAuth { url_pattern: String, }, } +/// A string used as a regex pattern. 
+#[derive(Clone, Debug, PartialEq, Eq, CandidType, Serialize, Deserialize)]
+pub struct RegexString(pub String);
+
+impl From<&str> for RegexString {
+    fn from(value: &str) -> Self {
+        RegexString(value.to_string())
+    }
+}
+
+impl RegexString {
+    /// Compile the string into a regular expression.
+    ///
+    /// This is a relatively expensive operation that's currently not cached.
+    pub fn compile(&self) -> Result<Regex, regex::Error> {
+        Regex::new(&self.0)
+    }
+
+    /// Checks if the given string matches the compiled regex pattern.
+    ///
+    /// Returns `Ok(true)` if `value` matches, `Ok(false)` if not, or an error if the regex is invalid.
+    pub fn try_is_valid(&self, value: &str) -> Result<bool, regex::Error> {
+        Ok(self.compile()?.is_match(value))
+    }
+}
+
+/// A regex-based substitution with a pattern and replacement string.
+#[derive(Clone, Debug, PartialEq, Eq, CandidType, Serialize, Deserialize)]
+pub struct RegexSubstitution {
+    /// The pattern to be matched.
+    pub pattern: RegexString,
+    /// The string to replace occurrences of [`pattern`] with.
+    pub replacement: String,
+}
 
 /// Allows modifying an [`RpcApi`]'s request URL and HTTP headers.
 ///
From 6849a86363083435c6428af331221e5568e2c29c Mon Sep 17 00:00:00 2001
From: Louis Pahlavi
Date: Thu, 20 Feb 2025 11:30:08 +0100
Subject: [PATCH 04/20] XC-286: Clippy

---
 libs/logs/src/lib.rs | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/libs/logs/src/lib.rs b/libs/logs/src/lib.rs
index fb9f4cad..59396138 100644
--- a/libs/logs/src/lib.rs
+++ b/libs/logs/src/lib.rs
@@ -5,7 +5,7 @@ mod types;
 
 pub use crate::types::LogFilter;
 use ic_canister_log::{export as export_logs, GlobalBuffer, Sink};
-use serde::Deserialize;
+use serde::{Deserialize, Serialize};
 use std::str::FromStr;
 
 pub trait LogPriority {
@@ -77,7 +77,7 @@ impl<Priority> Default for Log<Priority> {
 
 impl<'de, Priority> Log<Priority>
 where
-    Priority: LogPriority + Clone + Copy + Deserialize<'de> + serde::Serialize + 'static,
+    Priority: LogPriority + Clone + Copy + Deserialize<'de> + Serialize + 'static,
 {
     pub fn push_logs(&mut self, priority: Priority) {
         let logs = export_logs(priority.get_buffer());
@@ -85,7 +85,7 @@ where
         self.entries.push(LogEntry {
             timestamp: entry.timestamp,
             counter: entry.counter,
-            priority: priority.clone(),
+            priority,
             file: entry.file.to_string(),
             line: entry.line,
             message: entry.message,
From 634fa265f6670f80e495e7bb5bba35941888f3ea Mon Sep 17 00:00:00 2001
From: Louis Pahlavi
Date: Thu, 20 Feb 2025 11:31:53 +0100
Subject: [PATCH 05/20] XC-286: Remove unused constant

---
 canister/src/constants.rs    | 3 +--
 canister/src/validate/mod.rs | 2 +-
 2 files changed, 2 insertions(+), 3 deletions(-)

diff --git a/canister/src/constants.rs b/canister/src/constants.rs
index f8ad89c7..744e29ff 100644
--- a/canister/src/constants.rs
+++ b/canister/src/constants.rs
@@ -1,5 +1,4 @@
 pub const API_KEY_REPLACE_STRING: &str = "{API_KEY}";
-pub const API_KEY_MAX_SIZE: u32 = 512;
-pub const MESSAGE_FILTER_MAX_SIZE: u32 = 1000;
+pub const API_KEY_MAX_SIZE: usize = 512;
 pub const VALID_API_KEY_CHARS: &str =
     "0123456789ABCDEFGHIJKLMNOPQRTSUVWXYZabcdefghijklmnopqrstuvwxyz$-_.+!*";
diff --git a/canister/src/validate/mod.rs b/canister/src/validate/mod.rs
index ff3e5d12..c501b67e 100644
--- a/canister/src/validate/mod.rs
+++ b/canister/src/validate/mod.rs
@@ -10,7 +10,7 @@ const API_KEY_TOO_LONG_ERROR_MESSAGE: &str =
 pub fn validate_api_key(api_key: &str) -> Result<(), &'static str> {
     if api_key.is_empty() {
         Err("API key must not be an empty string")
-    } else if api_key.as_bytes().len() as u32 > API_KEY_MAX_SIZE {
+    } else if 
api_key.as_bytes().len() > API_KEY_MAX_SIZE { Err(API_KEY_TOO_LONG_ERROR_MESSAGE) } else if api_key .chars() From 3db90e4604636536d810ad0e330279a576c248cb Mon Sep 17 00:00:00 2001 From: Louis Pahlavi Date: Thu, 20 Feb 2025 11:38:13 +0100 Subject: [PATCH 06/20] XC-286: Clippy --- canister/src/lifecycle/mod.rs | 2 +- canister/src/logs/mod.rs | 2 +- canister/src/state/mod.rs | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/canister/src/lifecycle/mod.rs b/canister/src/lifecycle/mod.rs index 48704e5d..32b2d5e3 100644 --- a/canister/src/lifecycle/mod.rs +++ b/canister/src/lifecycle/mod.rs @@ -23,7 +23,7 @@ pub fn post_upgrade(args: Option) { mutate_state(|s| s.set_override_provider(override_provider.into())); } if let Some(log_filter) = args.log_filter { - mutate_state(|s| s.set_log_filter(log_filter.into())); + mutate_state(|s| s.set_log_filter(log_filter)); } } } diff --git a/canister/src/logs/mod.rs b/canister/src/logs/mod.rs index afe03cb3..e515664a 100644 --- a/canister/src/logs/mod.rs +++ b/canister/src/logs/mod.rs @@ -47,7 +47,7 @@ impl LogPriority for Priority { } fn get_priorities() -> &'static [Priority] { - &Self::VARIANTS + Self::VARIANTS } fn get_log_filter() -> LogFilter { diff --git a/canister/src/state/mod.rs b/canister/src/state/mod.rs index b8ceab3b..0fb205cd 100644 --- a/canister/src/state/mod.rs +++ b/canister/src/state/mod.rs @@ -139,7 +139,7 @@ impl From for State { api_keys: Default::default(), api_key_principals: value.manage_api_keys.unwrap_or_default(), override_provider: value.override_provider.unwrap_or_default().into(), - log_filter: value.log_filter.unwrap_or_default().into(), + log_filter: value.log_filter.unwrap_or_default(), } } } From ff747bd1f7393735d425fb3c67c2ee59114bf182 Mon Sep 17 00:00:00 2001 From: Louis Pahlavi Date: Thu, 20 Feb 2025 13:44:30 +0100 Subject: [PATCH 07/20] XC-286: Declare log buffers within macro --- Cargo.lock | 3 +- Cargo.toml | 1 + canister/Cargo.toml | 2 +- canister/src/logs/mod.rs | 55 ++++++------------------------------- libs/logs/Cargo.toml | 1 + libs/logs/src/lib.rs | 59 +++++++++++++++++++++++++++++++++++++--- libs/logs/src/tests.rs | 38 +++++++------------------- 7 files changed, 78 insertions(+), 81 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 8e2e585d..cab28869 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2878,11 +2878,11 @@ dependencies = [ "ic-canister-log", "ic-cdk", "ic-stable-structures", + "paste", "proptest", "regex", "serde", "serde_bytes", - "serde_json", "sol_rpc_logs", "sol_rpc_types", "strum 0.27.1", @@ -2927,6 +2927,7 @@ dependencies = [ "candid", "ic-canister-log", "ic-cdk", + "paste", "proptest", "regex", "serde", diff --git a/Cargo.toml b/Cargo.toml index 167cea74..d0fad4fe 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -40,6 +40,7 @@ ic-cdk = "0.17.1" ic-ed25519 = "0.1.0" ic-stable-structures = "0.6.7" ic-test-utilities-load-wasm = { git = "https://github.com/dfinity/ic", tag = "release-2025-01-23_03-04-base" } +paste = "1.0.15" pocket-ic = "6.0.0" proptest = "1.6.0" regex = "1.11.1" diff --git a/canister/Cargo.toml b/canister/Cargo.toml index 9743ee21..bfe2d7b4 100644 --- a/canister/Cargo.toml +++ b/canister/Cargo.toml @@ -25,10 +25,10 @@ sol_rpc_logs = { path = "../libs/logs" } regex = { workspace = true } serde = { workspace = true } serde_bytes = { workspace = true } -serde_json = { workspace = true } strum = { workspace = true } url = { workspace = true } zeroize = { workspace = true } +paste = "1.0.15" [dev-dependencies] candid_parser = { workspace = true } diff --git 
a/canister/src/logs/mod.rs b/canister/src/logs/mod.rs index e515664a..0405a8f2 100644 --- a/canister/src/logs/mod.rs +++ b/canister/src/logs/mod.rs @@ -1,55 +1,16 @@ use crate::state::read_state; -use candid::CandidType; -use ic_canister_log::{declare_log_buffer, GlobalBuffer}; -use serde::Deserialize; -use sol_rpc_logs::{LogFilter, LogPriority, PrintProxySink}; +use sol_rpc_logs::{declare_log_priorities, GetLogFilter, LogFilter}; use std::str::FromStr; -use strum::VariantArray; -// High-priority messages. -declare_log_buffer!(name = INFO_BUF, capacity = 1000); - -// Low-priority info messages. -declare_log_buffer!(name = DEBUG_BUF, capacity = 1000); - -// Trace of HTTP requests and responses. -declare_log_buffer!(name = TRACE_HTTP_BUF, capacity = 1000); - -pub const INFO: PrintProxySink = PrintProxySink(&Priority::Info, &INFO_BUF); -pub const DEBUG: PrintProxySink = PrintProxySink(&Priority::Debug, &DEBUG_BUF); -pub const TRACE_HTTP: PrintProxySink = - PrintProxySink(&Priority::TraceHttp, &TRACE_HTTP_BUF); - -#[derive( - Copy, Clone, Debug, Eq, PartialEq, CandidType, Deserialize, serde::Serialize, VariantArray, -)] -pub enum Priority { - Info, - Debug, - TraceHttp, -} - -impl LogPriority for Priority { - fn get_buffer(&self) -> &'static GlobalBuffer { - match self { - Self::Info => &INFO_BUF, - Self::Debug => &DEBUG_BUF, - Self::TraceHttp => &TRACE_HTTP_BUF, - } - } - - fn as_str_uppercase(&self) -> &'static str { - match self { - Self::Info => "INFO", - Self::TraceHttp => "TRACE_HTTP", - Self::Debug => "DEBUG", - } - } - - fn get_priorities() -> &'static [Priority] { - Self::VARIANTS +declare_log_priorities! { + pub enum Priority { + Info(1000, INFO), + Debug(1000, DEBUG), + TraceHttp(1000, TRACE_HTTP) } +} +impl GetLogFilter for Priority { fn get_log_filter() -> LogFilter { read_state(|state| state.get_log_filter()) } diff --git a/libs/logs/Cargo.toml b/libs/logs/Cargo.toml index 47ca99cd..a8a671a3 100644 --- a/libs/logs/Cargo.toml +++ b/libs/logs/Cargo.toml @@ -14,6 +14,7 @@ include = ["src", "Cargo.toml", "CHANGELOG.md", "LICENSE", "README.md"] candid = { workspace = true } ic-canister-log = { workspace = true } ic-cdk = { workspace = true } +paste = { workspace = true } regex = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } diff --git a/libs/logs/src/lib.rs b/libs/logs/src/lib.rs index 59396138..d45eff17 100644 --- a/libs/logs/src/lib.rs +++ b/libs/logs/src/lib.rs @@ -8,19 +8,70 @@ use ic_canister_log::{export as export_logs, GlobalBuffer, Sink}; use serde::{Deserialize, Serialize}; use std::str::FromStr; +#[derive(Debug)] +pub struct PrintProxySink(pub &'static Priority, pub &'static GlobalBuffer); + pub trait LogPriority { fn get_buffer(&self) -> &'static GlobalBuffer; fn as_str_uppercase(&self) -> &'static str; fn get_priorities() -> &'static [Self] where Self: Sized; - fn get_log_filter() -> LogFilter; } -#[derive(Debug)] -pub struct PrintProxySink(pub &'static Priority, pub &'static GlobalBuffer); +#[macro_export] +macro_rules! declare_log_priorities { + ( + pub enum $enum_name:ident { + $($variant:ident($capacity:expr, $uppercase:expr)),* + } + ) => { + // Declare the log priority enum + #[derive(Copy, Clone, Debug, Eq, PartialEq, candid::CandidType, serde::Deserialize, serde::Serialize)] + pub enum $enum_name { + $($variant),* + } + + // Declare the buffers for each log priority level + $(paste::paste! 
{ + ic_canister_log::declare_log_buffer!(name = [<$uppercase _BUF>], capacity = $capacity); + pub const $uppercase: $crate::PrintProxySink<$enum_name> = $crate::PrintProxySink(&$enum_name::$variant, &[<$uppercase _BUF>]); + })* + + // Array containing all enum variants + impl $enum_name { + const VARIANTS: &'static [Self] = &[ + $(Self::$variant),* + ]; + } + + // Implement some methods for the priority enum + impl $crate::LogPriority for $enum_name { + + fn get_buffer(&self) -> &'static ic_canister_log::GlobalBuffer { + match self { + $(Self::$variant => &paste::paste!([<$uppercase _BUF>]),)* + } + } + + fn as_str_uppercase(&self) -> &'static str { + match self { + $(Self::$variant => stringify!($uppercase),)* + } + } + + fn get_priorities() -> &'static [Self] { + Self::VARIANTS + } + } + }; +} + +pub trait GetLogFilter { + fn get_log_filter() -> LogFilter; +} -impl Sink for PrintProxySink { +impl Sink for PrintProxySink { fn append(&self, entry: ic_canister_log::LogEntry) { let message = format!( "{} {}:{} {}", diff --git a/libs/logs/src/tests.rs b/libs/logs/src/tests.rs index 1cb0baf5..9dda932a 100644 --- a/libs/logs/src/tests.rs +++ b/libs/logs/src/tests.rs @@ -1,38 +1,20 @@ -use super::{Log, LogEntry, LogPriority, PrintProxySink, Sort}; +use super::{declare_log_priorities, GetLogFilter, Log, LogEntry, LogPriority, Sort}; use crate::types::LogFilter; -use ic_canister_log::{declare_log_buffer, export, log, GlobalBuffer}; +use ic_canister_log::{export, log}; use proptest::{prop_assert, proptest}; -use serde::{Deserialize, Serialize}; use std::cell::RefCell; thread_local! { static LOG_FILTER: RefCell = RefCell::default(); } -declare_log_buffer!(name = INFO_TEST_BUF, capacity = 1000); -const INFO_TEST: PrintProxySink = PrintProxySink(&TestPriority::Info, &INFO_TEST_BUF); - -#[derive(Clone, Copy, Serialize, Deserialize)] -enum TestPriority { - Info, -} - -impl LogPriority for TestPriority { - fn get_buffer(&self) -> &'static GlobalBuffer { - &INFO_TEST_BUF - } - - fn as_str_uppercase(&self) -> &'static str { - "INFO" - } - - fn get_priorities() -> &'static [Self] - where - Self: Sized, - { - &[TestPriority::Info] +declare_log_priorities! 
{ + pub enum TestPriority { + Info(1000, INFO_TEST) } +} +impl GetLogFilter for TestPriority { fn get_log_filter() -> LogFilter { LOG_FILTER.with(|cell| cell.borrow().clone()) } @@ -72,7 +54,7 @@ fn is_descending(log: &Log) -> bool { } fn get_messages() -> Vec { - export(&INFO_TEST_BUF) + export(&TestPriority::Info.get_buffer()) .into_iter() .map(|entry| entry.message) .collect() @@ -231,9 +213,9 @@ fn should_show_pattern() { #[test] fn should_hide_pattern_including_message_type() { - set_log_filter(LogFilter::ShowPattern("^INFO [^ ]* 123".into())); + set_log_filter(LogFilter::ShowPattern("^INFO_TEST [^ ]* 123".into())); log!(INFO_TEST, "123"); - log!(INFO_TEST, "INFO 123"); + log!(INFO_TEST, "INFO_TEST 123"); log!(INFO_TEST, ""); log!(INFO_TEST, "123456"); assert_eq!(get_messages(), vec!["123", "123456"]); From dcffe42aceb038c90996426ad40f82ff5e4c97f1 Mon Sep 17 00:00:00 2001 From: Louis Pahlavi Date: Thu, 20 Feb 2025 13:51:19 +0100 Subject: [PATCH 08/20] XC-286: Clippy --- libs/logs/src/tests.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/libs/logs/src/tests.rs b/libs/logs/src/tests.rs index 9dda932a..0e82cb3a 100644 --- a/libs/logs/src/tests.rs +++ b/libs/logs/src/tests.rs @@ -54,7 +54,7 @@ fn is_descending(log: &Log) -> bool { } fn get_messages() -> Vec { - export(&TestPriority::Info.get_buffer()) + export(TestPriority::Info.get_buffer()) .into_iter() .map(|entry| entry.message) .collect() From d35a21bfeae3fa461431ad316afa6e8e0749f5e2 Mon Sep 17 00:00:00 2001 From: Louis Pahlavi Date: Thu, 20 Feb 2025 14:34:28 +0100 Subject: [PATCH 09/20] XC-286: Add some documentation --- canister/src/logs/mod.rs | 6 ++--- libs/logs/src/lib.rs | 57 +++++++++++++++++++++++++++++++--------- libs/logs/src/tests.rs | 2 +- 3 files changed, 49 insertions(+), 16 deletions(-) diff --git a/canister/src/logs/mod.rs b/canister/src/logs/mod.rs index 0405a8f2..456464c7 100644 --- a/canister/src/logs/mod.rs +++ b/canister/src/logs/mod.rs @@ -4,9 +4,9 @@ use std::str::FromStr; declare_log_priorities! { pub enum Priority { - Info(1000, INFO), - Debug(1000, DEBUG), - TraceHttp(1000, TRACE_HTTP) + Info(capacity = 1000, buffer = INFO), + Debug(capacity = 1000, buffer = DEBUG), + TraceHttp(capacity = 1000, buffer = TRACE_HTTP) } } diff --git a/libs/logs/src/lib.rs b/libs/logs/src/lib.rs index d45eff17..8407a160 100644 --- a/libs/logs/src/lib.rs +++ b/libs/logs/src/lib.rs @@ -8,22 +8,33 @@ use ic_canister_log::{export as export_logs, GlobalBuffer, Sink}; use serde::{Deserialize, Serialize}; use std::str::FromStr; -#[derive(Debug)] -pub struct PrintProxySink(pub &'static Priority, pub &'static GlobalBuffer); - -pub trait LogPriority { - fn get_buffer(&self) -> &'static GlobalBuffer; - fn as_str_uppercase(&self) -> &'static str; - fn get_priorities() -> &'static [Self] - where - Self: Sized; -} - +/// Use this macro to declare en enum containing priority levels and the corresponding +/// buffers as defined in the [`ic_canister_log`]. The resulting log priority automatically +/// implements the [`LogPriority`] trait. +/// +/// The [`GetLogFilter`] trait should be implemented manually for the resulting enum. +/// +/// # Example +/// ```rust +/// use ic_canister_log::log; +/// use sol_rpc_logs::declare_log_priorities; +/// +/// // Each log priority is defined here with a capacity of 1000 +/// declare_log_priorities! 
{ +/// pub enum Priority { +/// Info(capacity = 1000, buffer = INFO), +/// Debug(capacity = 1000, buffer = DEBUG) +/// } +/// } +/// +/// log!(INFO, "Some noteworthy event"); +/// log!(DEBUG, "A less noteworthy event"); +/// ``` #[macro_export] macro_rules! declare_log_priorities { ( pub enum $enum_name:ident { - $($variant:ident($capacity:expr, $uppercase:expr)),* + $($variant:ident(capacity = $capacity:expr, buffer = $uppercase:expr)),* } ) => { // Declare the log priority enum @@ -67,10 +78,32 @@ macro_rules! declare_log_priorities { }; } +/// Represents a log priority level. This trait is meant to be implemented +/// automatically by the [`declare_log_priorities!`] macro. +pub trait LogPriority { + /// Returns a reference to the [`GlobalBuffer`] where the log entries are stored. + fn get_buffer(&self) -> &'static GlobalBuffer; + + /// Returns an uppercase `&str` representing a log priority level. + fn as_str_uppercase(&self) -> &'static str; + + /// Returns an array containing all the log priority levels. + fn get_priorities() -> &'static [Self] + where + Self: Sized; +} + +/// Returns the [`LogFilter`] to check what entries to record. This trait should +/// be implemented manually for the log priority level enum generated by the +/// [`declare_log_priorities!`] macro. pub trait GetLogFilter { + /// Returns a [`LogFilter`]. Only log entries matching this filter will be recorded. fn get_log_filter() -> LogFilter; } +#[derive(Debug)] +pub struct PrintProxySink(pub &'static Priority, pub &'static GlobalBuffer); + impl Sink for PrintProxySink { fn append(&self, entry: ic_canister_log::LogEntry) { let message = format!( diff --git a/libs/logs/src/tests.rs b/libs/logs/src/tests.rs index 0e82cb3a..e3996f00 100644 --- a/libs/logs/src/tests.rs +++ b/libs/logs/src/tests.rs @@ -10,7 +10,7 @@ thread_local! { declare_log_priorities! { pub enum TestPriority { - Info(1000, INFO_TEST) + Info(capacity = 1000, buffer = INFO_TEST) } } From cd15e03e8616f233b29364870e10a44a4dfc03f0 Mon Sep 17 00:00:00 2001 From: Louis Pahlavi Date: Thu, 20 Feb 2025 15:06:51 +0100 Subject: [PATCH 10/20] XC-286: Add some more documentation --- libs/logs/src/lib.rs | 59 ++++++++++++++++++++++---------------- libs/logs/src/tests.rs | 4 +-- libs/logs/src/types/mod.rs | 25 ++++++++++++++++ 3 files changed, 61 insertions(+), 27 deletions(-) diff --git a/libs/logs/src/lib.rs b/libs/logs/src/lib.rs index 8407a160..b7efa2f3 100644 --- a/libs/logs/src/lib.rs +++ b/libs/logs/src/lib.rs @@ -1,12 +1,16 @@ +//! Crate for managing canister logs + +#![forbid(unsafe_code)] +#![forbid(missing_docs)] + #[cfg(test)] mod tests; mod types; -pub use crate::types::LogFilter; +pub use crate::types::{LogFilter, Sort}; use ic_canister_log::{export as export_logs, GlobalBuffer, Sink}; use serde::{Deserialize, Serialize}; -use std::str::FromStr; /// Use this macro to declare en enum containing priority levels and the corresponding /// buffers as defined in the [`ic_canister_log`]. The resulting log priority automatically @@ -17,7 +21,7 @@ use std::str::FromStr; /// # Example /// ```rust /// use ic_canister_log::log; -/// use sol_rpc_logs::declare_log_priorities; +/// use sol_rpc_logs::{declare_log_priorities, GetLogFilter, LogFilter}; /// /// // Each log priority is defined here with a capacity of 1000 /// declare_log_priorities! 
{ @@ -27,6 +31,12 @@ use std::str::FromStr; /// } /// } /// +/// impl GetLogFilter for Priority { +/// fn get_log_filter() -> LogFilter { +/// LogFilter::ShowAll +/// } +/// } +/// /// log!(INFO, "Some noteworthy event"); /// log!(DEBUG, "A less noteworthy event"); /// ``` @@ -101,6 +111,7 @@ pub trait GetLogFilter { fn get_log_filter() -> LogFilter; } +/// Defines how log entries are displayed and appended to the corresponding [`GlobalBuffer`]. #[derive(Debug)] pub struct PrintProxySink(pub &'static Priority, pub &'static GlobalBuffer); @@ -120,36 +131,27 @@ impl Sink for PrintProxySink { } } -#[derive(Copy, Clone, Debug, Deserialize, serde::Serialize)] -pub enum Sort { - Ascending, - Descending, -} - -impl FromStr for Sort { - type Err = String; - - fn from_str(s: &str) -> Result { - match s.to_lowercase().as_str() { - "asc" => Ok(Sort::Ascending), - "desc" => Ok(Sort::Descending), - _ => Err("could not recognize sort order".to_string()), - } - } -} - +/// A single log entry. #[derive(Clone, Debug, Eq, PartialEq, Deserialize, serde::Serialize)] pub struct LogEntry { + /// The time at which the log entry is recorded. pub timestamp: u64, + /// The log entry priority level. pub priority: Priority, + /// The source file in which this log entry was generated. pub file: String, + /// The line in [`file`] in which this log entry was generated. pub line: u32, + /// The log message. pub message: String, + /// The index of this entry starting from the last canister upgrade. pub counter: u64, } +/// A container for log entries at a given log priority level. #[derive(Clone, Debug, Deserialize, serde::Serialize)] pub struct Log { + /// The log entries for this priority level. pub entries: Vec>, } @@ -163,9 +165,9 @@ impl<'de, Priority> Log where Priority: LogPriority + Clone + Copy + Deserialize<'de> + Serialize + 'static, { + /// Append all the entries from the given [`Priority`] to [`entries`]. pub fn push_logs(&mut self, priority: Priority) { - let logs = export_logs(priority.get_buffer()); - for entry in logs { + for entry in export_logs(priority.get_buffer()) { self.entries.push(LogEntry { timestamp: entry.timestamp, counter: entry.counter, @@ -177,12 +179,18 @@ where } } + /// Append all the entries from all priority levels to [`entries`]. pub fn push_all(&mut self) { Priority::get_priorities() .iter() .for_each(|priority| self.push_logs(*priority)); } + /// Serialize the logs contained in `entries` into a JSON string. + /// + /// If the resulting string is larger than `max_body_size` bytes, + /// truncate `entries` so the resulting serialized JSON string + /// contains no more than `max_body_size` bytes. pub fn serialize_logs(&self, max_body_size: usize) -> String { let mut entries_json: String = serde_json::to_string(&self).unwrap_or_default(); @@ -207,6 +215,7 @@ where entries_json } + /// Sort the log entries according `sort_order`. 
pub fn sort_logs(&mut self, sort_order: Sort) { match sort_order { Sort::Ascending => self.sort_asc(), @@ -214,11 +223,11 @@ where } } - pub fn sort_asc(&mut self) { + fn sort_asc(&mut self) { self.entries.sort_by(|a, b| a.timestamp.cmp(&b.timestamp)); } - pub fn sort_desc(&mut self) { + fn sort_desc(&mut self) { self.entries.sort_by(|a, b| b.timestamp.cmp(&a.timestamp)); } } diff --git a/libs/logs/src/tests.rs b/libs/logs/src/tests.rs index e3996f00..48671145 100644 --- a/libs/logs/src/tests.rs +++ b/libs/logs/src/tests.rs @@ -1,5 +1,5 @@ -use super::{declare_log_priorities, GetLogFilter, Log, LogEntry, LogPriority, Sort}; -use crate::types::LogFilter; +use super::{declare_log_priorities, GetLogFilter, Log, LogEntry, LogPriority}; +use crate::types::{LogFilter, Sort}; use ic_canister_log::{export, log}; use proptest::{prop_assert, proptest}; use std::cell::RefCell; diff --git a/libs/logs/src/types/mod.rs b/libs/logs/src/types/mod.rs index b90b9400..256c0dae 100644 --- a/libs/logs/src/types/mod.rs +++ b/libs/logs/src/types/mod.rs @@ -1,6 +1,7 @@ use candid::CandidType; use regex::Regex; use serde::{Deserialize, Serialize}; +use std::str::FromStr; /// A string used as a regex pattern. #[derive(Clone, Debug, PartialEq, Eq, CandidType, Serialize, Deserialize)] @@ -52,6 +53,7 @@ pub enum LogFilter { } impl LogFilter { + /// Returns whether the given message matches the [`LogFilter`]. pub fn is_match(&self, message: &str) -> bool { match self { Self::ShowAll => true, @@ -65,3 +67,26 @@ impl LogFilter { } } } + +/// Defines a sorting order for log entries +#[derive(Copy, Clone, Debug, Deserialize, serde::Serialize)] +pub enum Sort { + /// Log entries are sorted in ascending chronological order, i.e. + /// from oldest to newest. + Ascending, + /// Log entries are sorted in descending chronological order, i.e. + /// from newest to oldest. 
+ Descending, +} + +impl FromStr for Sort { + type Err = String; + + fn from_str(s: &str) -> Result { + match s.to_lowercase().as_str() { + "asc" => Ok(Sort::Ascending), + "desc" => Ok(Sort::Descending), + _ => Err("could not recognize sort order".to_string()), + } + } +} From 4db276e8c8cce8b26ae23ff90f27a1c1f1af5e5b Mon Sep 17 00:00:00 2001 From: Louis Pahlavi Date: Wed, 26 Feb 2025 16:13:10 +0100 Subject: [PATCH 11/20] canlog and canlog_derive --- Cargo.lock | 144 ++++++++++----- Cargo.toml | 3 +- canister/Cargo.toml | 4 +- canister/src/lifecycle/mod.rs | 6 +- canister/src/logs/mod.rs | 17 +- canister/src/main.rs | 8 +- canister/src/state/mod.rs | 2 +- {libs/logs => canlog}/CHANGELOG.md | 0 {libs/logs => canlog}/Cargo.toml | 2 +- {libs/logs => canlog}/LICENSE | 0 canlog/README.md | 2 + {libs/logs => canlog}/src/lib.rs | 175 +++++++++--------- {libs/logs => canlog}/src/types/mod.rs | 0 canlog_derive/CHANGELOG.md | 8 + canlog_derive/Cargo.toml | 24 +++ canlog_derive/LICENSE | 1 + canlog_derive/README.md | 2 + canlog_derive/src/lib.rs | 154 ++++++++++++++++ integration_tests/Cargo.toml | 4 + integration_tests/tests/tests.rs | 235 ++++++++++++++++++++++++- libs/client/Cargo.toml | 2 +- libs/client/src/lib.rs | 8 +- libs/logs/README.md | 2 - libs/logs/src/tests.rs | 232 ------------------------ libs/types/Cargo.toml | 2 +- libs/types/src/lifecycle/mod.rs | 2 +- rust-toolchain.toml | 2 +- 27 files changed, 647 insertions(+), 394 deletions(-) rename {libs/logs => canlog}/CHANGELOG.md (100%) rename {libs/logs => canlog}/Cargo.toml (96%) rename {libs/logs => canlog}/LICENSE (100%) create mode 100644 canlog/README.md rename {libs/logs => canlog}/src/lib.rs (60%) rename {libs/logs => canlog}/src/types/mod.rs (100%) create mode 100644 canlog_derive/CHANGELOG.md create mode 100644 canlog_derive/Cargo.toml create mode 120000 canlog_derive/LICENSE create mode 100644 canlog_derive/README.md create mode 100644 canlog_derive/src/lib.rs delete mode 100644 libs/logs/README.md delete mode 100644 libs/logs/src/tests.rs diff --git a/Cargo.lock b/Cargo.lock index cab28869..9df5d81d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -135,9 +135,9 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64ct" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" +checksum = "c103cbbedac994e292597ab79342dbd5b306a362045095db54917d92a9fdfd92" [[package]] name = "basic_solana" @@ -455,6 +455,33 @@ dependencies = [ "thiserror 1.0.69", ] +[[package]] +name = "canlog" +version = "0.1.0" +dependencies = [ + "candid", + "ic-canister-log", + "ic-cdk", + "paste", + "proptest", + "regex", + "serde", + "serde_json", +] + +[[package]] +name = "canlog_derive" +version = "0.1.0" +dependencies = [ + "canlog", + "darling", + "ic-canister-log", + "paste", + "proc-macro2", + "quote", + "syn 2.0.98", +] + [[package]] name = "cargo-platform" version = "0.1.9" @@ -479,9 +506,9 @@ dependencies = [ [[package]] name = "cc" -version = "1.2.14" +version = "1.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c3d1b2e905a3a7b00a6141adb0e4c0bb941d11caf55349d863942a1cc44e3c9" +checksum = "c736e259eea577f443d5c86c304f9f4ae0295c43f3ba05c21f1d66b5f06001af" dependencies = [ "shlex", ] @@ -703,6 +730,41 @@ dependencies = [ "syn 2.0.98", ] +[[package]] +name = "darling" +version = "0.20.10" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "6f63b86c8a8826a49b8c21f08a2d07338eec8d900540f8630dc76284be802989" +dependencies = [ + "darling_core", + "darling_macro", +] + +[[package]] +name = "darling_core" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "95133861a8032aaea082871032f5815eb9e98cef03fa916ab4500513994df9e5" +dependencies = [ + "fnv", + "ident_case", + "proc-macro2", + "quote", + "strsim", + "syn 2.0.98", +] + +[[package]] +name = "darling_macro" +version = "0.20.10" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d336a2a514f6ccccaa3e09b02d41d35330c07ddf03a62165fcec10bb561c7806" +dependencies = [ + "darling_core", + "quote", + "syn 2.0.98", +] + [[package]] name = "data-encoding" version = "2.8.0" @@ -839,9 +901,9 @@ dependencies = [ [[package]] name = "either" -version = "1.13.0" +version = "1.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60b1af1c220855b6ceac025d3f6ecdd2b7c4894bfe9cd9bda4fbb4bc7c0d4cf0" +checksum = "b7914353092ddf589ad78f25c5c1c21b7f80b0ff8621e7c814c3485b5306da9d" [[package]] name = "ena" @@ -1495,6 +1557,12 @@ dependencies = [ "syn 2.0.98", ] +[[package]] +name = "ident_case" +version = "1.0.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" + [[package]] name = "idna" version = "1.0.3" @@ -1611,9 +1679,9 @@ checksum = "884e2677b40cc8c339eaefcb701c32ef1fd2493d71118dc0ca4b6a736c93bd67" [[package]] name = "libc" -version = "0.2.169" +version = "0.2.170" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b5aba8db14291edd000dfcc4d620c7ebfb122c613afb886ca8803fa4e128a20a" +checksum = "875b3680cb2f8f71bdcf9a30f38d48282f5d3c95cbf9b3fa57269bb5d5c06828" [[package]] name = "libredox" @@ -1695,9 +1763,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.25" +version = "0.4.26" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "04cbf5b083de1c7e0222a7a51dbfdba1cbe1c6ab0b15e29fff3f6c077fd9cd9f" +checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e" [[package]] name = "logos" @@ -1794,9 +1862,9 @@ dependencies = [ [[package]] name = "miniz_oxide" -version = "0.8.4" +version = "0.8.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b3b1c9bd4fe1f0f8b387f6eb9eb3b4a1aa26185e5750efb9140301703f62cd1b" +checksum = "8e3e04debbb59698c15bacbb6d93584a8c0ca9cc3213cb423d31f760d8843ce5" dependencies = [ "adler2", ] @@ -2327,9 +2395,9 @@ dependencies = [ [[package]] name = "redox_syscall" -version = "0.5.8" +version = "0.5.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "03a862b389f93e68874fbf580b9de08dd02facb9a788ebadaf4a3fd33cf58834" +checksum = "82b568323e98e49e2a0899dcee453dd679fae22d69adf9b11dd508d1549b7e2f" dependencies = [ "bitflags", ] @@ -2441,9 +2509,9 @@ dependencies = [ [[package]] name = "ring" -version = "0.17.9" +version = "0.17.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e75ec5e92c4d8aede845126adc388046234541629e76029599ed35a003c7ed24" +checksum = "da5349ae27d3887ca812fb375b45a4fbb36d8d12d2df394968cd86e35683fe73" dependencies = [ "cc", "cfg-if", @@ -2586,9 +2654,9 @@ dependencies = [ [[package]] name = "schemars" -version = "0.8.21" +version = "0.8.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"09c024468a378b7e36765cd36702b7a90cc3cba11654f6685c8f233408e89e92" +checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615" dependencies = [ "dyn-clone", "schemars_derive", @@ -2598,9 +2666,9 @@ dependencies = [ [[package]] name = "schemars_derive" -version = "0.8.21" +version = "0.8.22" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1eee588578aff73f856ab961cd2f79e36bc45d7ded33a7562adba4667aecc0e" +checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d" dependencies = [ "proc-macro2", "quote", @@ -2872,10 +2940,11 @@ version = "0.1.0" dependencies = [ "candid", "candid_parser", + "canlog", + "canlog_derive", "ciborium", "const_format", "hex", - "ic-canister-log", "ic-cdk", "ic-stable-structures", "paste", @@ -2883,7 +2952,6 @@ dependencies = [ "regex", "serde", "serde_bytes", - "sol_rpc_logs", "sol_rpc_types", "strum 0.27.1", "url", @@ -2896,12 +2964,12 @@ version = "0.1.0" dependencies = [ "async-trait", "candid", + "canlog", "ic-cdk", "serde", "serde_bytes", "serde_json", "sol_rpc_canister", - "sol_rpc_logs", "sol_rpc_types", ] @@ -2911,38 +2979,28 @@ version = "0.1.0" dependencies = [ "async-trait", "candid", + "canlog", + "canlog_derive", "ic-cdk", "ic-test-utilities-load-wasm", "pocket-ic", + "proptest", "serde", + "serde_json", "sol_rpc_client", "sol_rpc_types", "tokio", ] -[[package]] -name = "sol_rpc_logs" -version = "0.1.0" -dependencies = [ - "candid", - "ic-canister-log", - "ic-cdk", - "paste", - "proptest", - "regex", - "serde", - "serde_json", -] - [[package]] name = "sol_rpc_types" version = "0.1.0" dependencies = [ "candid", + "canlog", "ic-cdk", "regex", "serde", - "sol_rpc_logs", "strum 0.27.1", "url", ] @@ -3750,9 +3808,9 @@ checksum = "a8f112729512f8e442d81f95a8a7ddf2b7c6b8a1a6f509a95864142b30cab2d3" [[package]] name = "stacker" -version = "0.1.18" +version = "0.1.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d08feb8f695b465baed819b03c128dc23f57a694510ab1f06c77f763975685e" +checksum = "d9156ebd5870ef293bfb43f91c7a74528d363ec0d424afe24160ed5a4343d08a" dependencies = [ "cc", "cfg-if", @@ -3773,6 +3831,12 @@ dependencies = [ "precomputed-hash", ] +[[package]] +name = "strsim" +version = "0.11.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f" + [[package]] name = "strum" version = "0.26.3" diff --git a/Cargo.toml b/Cargo.toml index d0fad4fe..b6efb3ea 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -3,8 +3,9 @@ resolver = "2" members = [ "canister", "integration_tests", "libs/client", - "libs/logs", "libs/types", + "canlog", + "canlog_derive", "examples/basic_solana"] [workspace.package] diff --git a/canister/Cargo.toml b/canister/Cargo.toml index bfe2d7b4..8764ee81 100644 --- a/canister/Cargo.toml +++ b/canister/Cargo.toml @@ -14,14 +14,14 @@ path = "src/main.rs" [dependencies] candid = { workspace = true } +canlog = { path = "../canlog" } +canlog_derive = { path = "../canlog_derive" } ciborium = { workspace = true } const_format = { workspace = true } hex = { workspace = true } -ic-canister-log = { workspace = true } ic-cdk = { workspace = true } ic-stable-structures = { workspace = true } sol_rpc_types = { path = "../libs/types" } -sol_rpc_logs = { path = "../libs/logs" } regex = { workspace = true } serde = { workspace = true } serde_bytes = { workspace = true } diff --git a/canister/src/lifecycle/mod.rs b/canister/src/lifecycle/mod.rs index 32b2d5e3..bf25a74d 100644 
--- a/canister/src/lifecycle/mod.rs +++ b/canister/src/lifecycle/mod.rs @@ -1,8 +1,8 @@ use crate::{ - logs::INFO, + logs::Priority, state::{init_state, mutate_state, State}, }; -use ic_canister_log::log; +use canlog::{log, LogPriorityLevels}; use sol_rpc_types::InstallArgs; pub fn init(args: InstallArgs) { @@ -12,7 +12,7 @@ pub fn init(args: InstallArgs) { pub fn post_upgrade(args: Option) { if let Some(args) = args { log!( - INFO, + Priority::Info, "[init]: upgraded SOL RPC canister with arg: {:?}", args ); diff --git a/canister/src/logs/mod.rs b/canister/src/logs/mod.rs index 456464c7..5305817d 100644 --- a/canister/src/logs/mod.rs +++ b/canister/src/logs/mod.rs @@ -1,13 +1,16 @@ use crate::state::read_state; -use sol_rpc_logs::{declare_log_priorities, GetLogFilter, LogFilter}; +use canlog::{GetLogFilter, LogFilter}; use std::str::FromStr; +use serde::{Deserialize, Serialize}; -declare_log_priorities! { - pub enum Priority { - Info(capacity = 1000, buffer = INFO), - Debug(capacity = 1000, buffer = DEBUG), - TraceHttp(capacity = 1000, buffer = TRACE_HTTP) - } +#[derive(canlog_derive::LogPriorityLevels, Serialize, Deserialize, PartialEq, Debug, Copy, Clone)] +pub enum Priority { + #[log_level(capacity = 1000, name = "INFO")] + Info, + #[log_level(capacity = 1000, name = "DEBUG")] + Debug, + #[log_level(capacity = 1000, name = "TRACE_HTTP")] + TraceHttp, } impl GetLogFilter for Priority { diff --git a/canister/src/main.rs b/canister/src/main.rs index e29fa430..d232e7c1 100644 --- a/canister/src/main.rs +++ b/canister/src/main.rs @@ -1,5 +1,5 @@ use candid::candid_method; -use ic_canister_log::log; +use canlog::{log, Log, LogPriorityLevels, Sort}; use ic_cdk::{ api::is_controller, {query, update}, @@ -7,11 +7,9 @@ use ic_cdk::{ use sol_rpc_canister::{ http_types, lifecycle, logs::Priority, - logs::INFO, providers::{find_provider, PROVIDERS}, state::{mutate_state, read_state}, }; -use sol_rpc_logs::{Log, Sort}; use sol_rpc_types::{ProviderId, RpcAccess}; use std::str::FromStr; @@ -43,7 +41,7 @@ fn get_providers() -> Vec { /// Panics if the list of provider IDs includes a nonexistent or "unauthenticated" (fully public) provider. 
async fn update_api_keys(api_keys: Vec<(ProviderId, Option)>) { log!( - INFO, + Priority::Info, "[{}] Updating API keys for providers: {}", ic_cdk::caller(), api_keys @@ -73,7 +71,7 @@ async fn update_api_keys(api_keys: Vec<(ProviderId, Option)>) { #[query(hidden = true)] fn http_request(request: http_types::HttpRequest) -> http_types::HttpResponse { match request.path() { - "/logs" => { + "/canlog" => { let max_skip_timestamp = match request.raw_query_param("time") { Some(arg) => match u64::from_str(arg) { Ok(value) => value, diff --git a/canister/src/state/mod.rs b/canister/src/state/mod.rs index 0fb205cd..f65610df 100644 --- a/canister/src/state/mod.rs +++ b/canister/src/state/mod.rs @@ -9,7 +9,7 @@ use ic_stable_structures::{ Cell, DefaultMemoryImpl, Storable, }; use serde::Serialize; -use sol_rpc_logs::LogFilter; +use canlog::LogFilter; use sol_rpc_types::{InstallArgs, ProviderId}; use std::{borrow::Cow, cell::RefCell, collections::BTreeMap}; diff --git a/libs/logs/CHANGELOG.md b/canlog/CHANGELOG.md similarity index 100% rename from libs/logs/CHANGELOG.md rename to canlog/CHANGELOG.md diff --git a/libs/logs/Cargo.toml b/canlog/Cargo.toml similarity index 96% rename from libs/logs/Cargo.toml rename to canlog/Cargo.toml index a8a671a3..2309bb48 100644 --- a/libs/logs/Cargo.toml +++ b/canlog/Cargo.toml @@ -1,5 +1,5 @@ [package] -name = "sol_rpc_logs" +name = "canlog" version = "0.1.0" description = "Crate for managing canister logs" authors.workspace = true diff --git a/libs/logs/LICENSE b/canlog/LICENSE similarity index 100% rename from libs/logs/LICENSE rename to canlog/LICENSE diff --git a/canlog/README.md b/canlog/README.md new file mode 100644 index 00000000..601ac611 --- /dev/null +++ b/canlog/README.md @@ -0,0 +1,2 @@ +# Crate `canlog` + diff --git a/libs/logs/src/lib.rs b/canlog/src/lib.rs similarity index 60% rename from libs/logs/src/lib.rs rename to canlog/src/lib.rs index b7efa2f3..5947df01 100644 --- a/libs/logs/src/lib.rs +++ b/canlog/src/lib.rs @@ -1,101 +1,94 @@ -//! Crate for managing canister logs +//! Crate for managing canister canlog #![forbid(unsafe_code)] #![forbid(missing_docs)] -#[cfg(test)] -mod tests; - mod types; pub use crate::types::{LogFilter, Sort}; -use ic_canister_log::{export as export_logs, GlobalBuffer, Sink}; +pub use ic_canister_log::{declare_log_buffer, export as export_logs, GlobalBuffer, Sink, log as raw_log}; use serde::{Deserialize, Serialize}; -/// Use this macro to declare en enum containing priority levels and the corresponding -/// buffers as defined in the [`ic_canister_log`]. The resulting log priority automatically -/// implements the [`LogPriority`] trait. -/// -/// The [`GetLogFilter`] trait should be implemented manually for the resulting enum. +/// Wrapper for the [`ic_canister_log::log`] macro that allows logging +/// for a given variant of an enum implementing the [`LogPriorityLevels`] +/// trait. 
/// -/// # Example +/// **Usage Example:** /// ```rust -/// use ic_canister_log::log; -/// use sol_rpc_logs::{declare_log_priorities, GetLogFilter, LogFilter}; +/// use canlog::{GetLogFilter, LogFilter, LogPriorityLevels, log, declare_log_buffer, PrintProxySink}; +/// +/// enum LogPriority { +/// Info, +/// Debug, +/// } +/// +/// declare_log_buffer!(name = INFO_BUF, capacity = 100); +/// declare_log_buffer!(name = DEBUG_BUF, capacity = 500); +/// +/// const INFO: PrintProxySink = PrintProxySink(&LogPriority::Info, &INFO_BUF); +/// const DEBUG: PrintProxySink = PrintProxySink(&LogPriority::Info, &DEBUG_BUF); +/// +/// impl LogPriorityLevels for LogPriority { +/// fn get_buffer(&self) -> &'static canlog::GlobalBuffer { +/// match self { +/// Self::Info => &INFO_BUF, +/// Self::Debug => &DEBUG_BUF, +/// } +/// } /// -/// // Each log priority is defined here with a capacity of 1000 -/// declare_log_priorities! { -/// pub enum Priority { -/// Info(capacity = 1000, buffer = INFO), -/// Debug(capacity = 1000, buffer = DEBUG) +/// fn get_sink(&self) -> &impl canlog::Sink { +/// match self { +/// Self::Info => &INFO, +/// Self::Debug => &DEBUG, +/// } +/// } +/// +/// fn display_name(&self) -> &'static str { +/// match self { +/// Self::Info => "INFO", +/// Self::Debug => "DEBUG", +/// } +/// } +/// +/// fn get_priorities() -> &'static [Self] { +/// &[Self::Info, Self::Debug] /// } /// } /// -/// impl GetLogFilter for Priority { +/// impl GetLogFilter for LogPriority { /// fn get_log_filter() -> LogFilter { /// LogFilter::ShowAll /// } /// } /// -/// log!(INFO, "Some noteworthy event"); -/// log!(DEBUG, "A less noteworthy event"); +/// fn main() { +/// log!(LogPriority::Info, "Some rather important message."); +/// log!(LogPriority::Debug, "Some less important message."); +/// } +/// ``` +/// +/// **Expected Output:** +/// ```text +/// 2025-02-26 08:27:10 UTC: [Canister lxzze-o7777-77777-aaaaa-cai] INFO main.rs:13 Some rather important message. +/// 2025-02-26 08:27:10 UTC: [Canister lxzze-o7777-77777-aaaaa-cai] DEBUG main.rs:14 Some less important message. /// ``` #[macro_export] -macro_rules! declare_log_priorities { - ( - pub enum $enum_name:ident { - $($variant:ident(capacity = $capacity:expr, buffer = $uppercase:expr)),* - } - ) => { - // Declare the log priority enum - #[derive(Copy, Clone, Debug, Eq, PartialEq, candid::CandidType, serde::Deserialize, serde::Serialize)] - pub enum $enum_name { - $($variant),* - } - - // Declare the buffers for each log priority level - $(paste::paste! { - ic_canister_log::declare_log_buffer!(name = [<$uppercase _BUF>], capacity = $capacity); - pub const $uppercase: $crate::PrintProxySink<$enum_name> = $crate::PrintProxySink(&$enum_name::$variant, &[<$uppercase _BUF>]); - })* - - // Array containing all enum variants - impl $enum_name { - const VARIANTS: &'static [Self] = &[ - $(Self::$variant),* - ]; - } - - // Implement some methods for the priority enum - impl $crate::LogPriority for $enum_name { - - fn get_buffer(&self) -> &'static ic_canister_log::GlobalBuffer { - match self { - $(Self::$variant => &paste::paste!([<$uppercase _BUF>]),)* - } - } - - fn as_str_uppercase(&self) -> &'static str { - match self { - $(Self::$variant => stringify!($uppercase),)* - } - } - - fn get_priorities() -> &'static [Self] { - Self::VARIANTS - } - } +macro_rules! log { + ($enum_variant:expr, $($args:tt)*) => { + canlog::raw_log!($enum_variant.get_sink(), $($args)*); }; } /// Represents a log priority level. 
This trait is meant to be implemented /// automatically by the [`declare_log_priorities!`] macro. -pub trait LogPriority { - /// Returns a reference to the [`GlobalBuffer`] where the log entries are stored. +pub trait LogPriorityLevels { + #[doc(hidden)] fn get_buffer(&self) -> &'static GlobalBuffer; + #[doc(hidden)] + fn get_sink(&self) -> &impl Sink; - /// Returns an uppercase `&str` representing a log priority level. - fn as_str_uppercase(&self) -> &'static str; + /// Returns a display representation for a log priority level. + fn display_name(&self) -> &'static str; /// Returns an array containing all the log priority levels. fn get_priorities() -> &'static [Self] @@ -111,26 +104,6 @@ pub trait GetLogFilter { fn get_log_filter() -> LogFilter; } -/// Defines how log entries are displayed and appended to the corresponding [`GlobalBuffer`]. -#[derive(Debug)] -pub struct PrintProxySink(pub &'static Priority, pub &'static GlobalBuffer); - -impl Sink for PrintProxySink { - fn append(&self, entry: ic_canister_log::LogEntry) { - let message = format!( - "{} {}:{} {}", - self.0.as_str_uppercase(), - entry.file, - entry.line, - entry.message, - ); - if Priority::get_log_filter().is_match(&message) { - ic_cdk::println!("{}", message); - self.1.append(entry) - } - } -} - /// A single log entry. #[derive(Clone, Debug, Eq, PartialEq, Deserialize, serde::Serialize)] pub struct LogEntry { @@ -163,7 +136,7 @@ impl Default for Log { impl<'de, Priority> Log where - Priority: LogPriority + Clone + Copy + Deserialize<'de> + Serialize + 'static, + Priority: LogPriorityLevels + Clone + Copy + Deserialize<'de> + Serialize + 'static, { /// Append all the entries from the given [`Priority`] to [`entries`]. pub fn push_logs(&mut self, priority: Priority) { @@ -186,7 +159,7 @@ where .for_each(|priority| self.push_logs(*priority)); } - /// Serialize the logs contained in `entries` into a JSON string. + /// Serialize the canlog contained in `entries` into a JSON string. /// /// If the resulting string is larger than `max_body_size` bytes, /// truncate `entries` so the resulting serialized JSON string @@ -231,3 +204,23 @@ where self.entries.sort_by(|a, b| b.timestamp.cmp(&a.timestamp)); } } + +#[doc(hidden)] +#[derive(Debug)] +pub struct PrintProxySink(pub &'static Priority, pub &'static GlobalBuffer); + +impl Sink for PrintProxySink { + fn append(&self, entry: ic_canister_log::LogEntry) { + let message = format!( + "{} {}:{} {}", + self.0.display_name(), + entry.file, + entry.line, + entry.message, + ); + if Priority::get_log_filter().is_match(&message) { + ic_cdk::println!("{}", message); + self.1.append(entry) + } + } +} diff --git a/libs/logs/src/types/mod.rs b/canlog/src/types/mod.rs similarity index 100% rename from libs/logs/src/types/mod.rs rename to canlog/src/types/mod.rs diff --git a/canlog_derive/CHANGELOG.md b/canlog_derive/CHANGELOG.md new file mode 100644 index 00000000..5fda63a8 --- /dev/null +++ b/canlog_derive/CHANGELOG.md @@ -0,0 +1,8 @@ +Changelog + +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.1.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). 
+ +## [Unreleased] \ No newline at end of file diff --git a/canlog_derive/Cargo.toml b/canlog_derive/Cargo.toml new file mode 100644 index 00000000..cc2fc410 --- /dev/null +++ b/canlog_derive/Cargo.toml @@ -0,0 +1,24 @@ +[package] +name = "canlog_derive" +version = "0.1.0" +description = "Crate with macro definitions for the canlog crate" +authors.workspace = true +edition.workspace = true +repository.workspace = true +homepage.workspace = true +license.workspace = true +readme = "README.md" +include = ["src", "Cargo.toml", "CHANGELOG.md", "LICENSE", "README.md"] + +[dependencies] +canlog = { path = "../canlog" } +ic-canister-log = { workspace = true } +paste = { workspace = true } + +syn = { version = "2.0.98", features = ["derive"] } +quote = "1.0.38" +proc-macro2 = "1.0.93" +darling = "0.20.10" + +[lib] +proc-macro = true diff --git a/canlog_derive/LICENSE b/canlog_derive/LICENSE new file mode 120000 index 00000000..30cff740 --- /dev/null +++ b/canlog_derive/LICENSE @@ -0,0 +1 @@ +../../LICENSE \ No newline at end of file diff --git a/canlog_derive/README.md b/canlog_derive/README.md new file mode 100644 index 00000000..18aab6f0 --- /dev/null +++ b/canlog_derive/README.md @@ -0,0 +1,2 @@ +# Crate `canlog_derive` + diff --git a/canlog_derive/src/lib.rs b/canlog_derive/src/lib.rs new file mode 100644 index 00000000..7108b001 --- /dev/null +++ b/canlog_derive/src/lib.rs @@ -0,0 +1,154 @@ +//! Crate for managing canister canlog + +#![forbid(unsafe_code)] +#![forbid(missing_docs)] + +use darling::FromVariant; +use proc_macro::TokenStream; +use proc_macro2::Ident; +use quote::quote; +use syn::{parse_macro_input, Data, DataEnum, DeriveInput}; + +/// A procedural macro to implement [`LogPriorityLevels`](canlog::LogPriorityLevels) for an enum. +/// +/// This macro expects the variants to be annotated with `#[log_level(capacity = N, name = "NAME")]` +/// where `N` is an integer representing buffer capacity and `"NAME"` is a string display +/// representation for the corresponding log level. +/// +/// The enum annotated with `#[derive(LogPriorityLevels)]` must also implement the +/// [`Serialize`](serde::Serialize), [`Deserialize`](serde::Deserialize), +/// [`Clone`](core::clone::Clone) and [`Copy`](core::marker::Copy) traits +/// +/// **Generated Code:** +/// 1. Declares a [`GlobalBuffer`](ic_canister_log::GlobalBuffer) and +/// [`Sink`](ic_canister_log::Sink) constant for each variant. +/// 2. Implements the [LogPriorityLevels](canlog::LogPriorityLevels) trait for the enum. +/// +/// **Usage Example:** +/// ```rust +/// use canlog::{GetLogFilter, LogFilter, LogPriorityLevels, log}; +/// use canlog_derive::LogPriorityLevels; +/// +/// #[derive(LogPriorityLevels)] +/// enum LogPriority { +/// #[log_level(capacity = 100, name = "INFO")] +/// Info, +/// #[log_level(capacity = 500, name = "DEBUG")] +/// Debug, +/// } +/// +/// impl GetLogFilter for LogPriority { +/// fn get_log_filter() -> LogFilter { +/// LogFilter::ShowAll +/// } +/// } +/// +/// fn main() { +/// log!(LogPriority::Info, "Some rather important message."); +/// log!(LogPriority::Debug, "Some less important message."); +/// } +/// ``` +/// +/// **Expected Output:** +/// ```text +/// 2025-02-26 08:27:10 UTC: [Canister lxzze-o7777-77777-aaaaa-cai] INFO main.rs:13 Some rather important message. +/// 2025-02-26 08:27:10 UTC: [Canister lxzze-o7777-77777-aaaaa-cai] DEBUG main.rs:14 Some less important message. 
+/// ``` +#[proc_macro_derive(LogPriorityLevels, attributes(log_level))] +pub fn derive_log_priority(input: TokenStream) -> TokenStream { + let input = parse_macro_input!(input as DeriveInput); + let enum_ident = &input.ident; + + let Data::Enum(DataEnum { variants, .. }) = &input.data else { + panic!("This trait can only be derived for enums"); + }; + + // Declare a buffer and sink for each enum variant + let buffer_declarations = variants.iter().map(|variant| { + let variant_ident = &variant.ident; + let info = LogLevelInfo::from_variant(variant) + .expect(format!("Invalid attributes for log level: {}", variant_ident).as_str()); + + let buffer_ident = get_buffer_ident(variant_ident); + let sink_ident = get_sink_ident(variant_ident); + let capacity = info.capacity; + + quote! { + canlog::declare_log_buffer!(name = #buffer_ident, capacity = #capacity); + pub const #sink_ident: canlog::PrintProxySink<#enum_ident> = canlog::PrintProxySink(&#enum_ident::#variant_ident, &#buffer_ident); + } + }); + + // Match arms to get the corresponding buffer, sink and display name for each enum variant + let buffer_match_arms = variants.iter().map(|variant| { + let variant_ident = &variant.ident; + let buffer_ident = get_buffer_ident(variant_ident); + quote! { + Self::#variant_ident => &#buffer_ident, + } + }); + let sink_match_arms = variants.iter().map(|variant| { + let variant_ident = &variant.ident; + let sink_ident = get_sink_ident(variant_ident); + quote! { + Self::#variant_ident => &#sink_ident, + } + }); + let display_name_match_arms = variants.iter().map(|variant| { + let variant_ident = &variant.ident; + let display_name = LogLevelInfo::from_variant(variant).unwrap().name; + quote! { + Self::#variant_ident => #display_name, + } + }); + let variants_array = variants.iter().map(|variant| { + let variant_ident = &variant.ident; + quote! { Self::#variant_ident, } + }); + + // Generate buffer declarations and trait implementation + let trait_impl = quote! 
{ + #(#buffer_declarations)* + + impl canlog::LogPriorityLevels for #enum_ident { + fn get_buffer(&self) -> &'static canlog::GlobalBuffer { + match self { + #(#buffer_match_arms)* + } + } + + fn get_sink(&self) -> &impl canlog::Sink { + match self { + #(#sink_match_arms)* + } + } + + fn display_name(&self) -> &'static str { + match self { + #(#display_name_match_arms)* + } + } + + fn get_priorities() -> &'static [Self] { + &[#(#variants_array)*] + } + } + }; + + trait_impl.into() +} + +#[derive(FromVariant)] +#[darling(attributes(log_level))] +struct LogLevelInfo { + capacity: usize, + name: String, +} + +fn get_sink_ident(variant_ident: &Ident) -> Ident { + quote::format_ident!("{}", variant_ident.to_string().to_uppercase()) +} + +fn get_buffer_ident(variant_ident: &Ident) -> Ident { + quote::format_ident!("{}_BUF", variant_ident.to_string().to_uppercase()) +} diff --git a/integration_tests/Cargo.toml b/integration_tests/Cargo.toml index 20cba9f8..ada18fc5 100644 --- a/integration_tests/Cargo.toml +++ b/integration_tests/Cargo.toml @@ -10,10 +10,14 @@ license.workspace = true [dependencies] async-trait = { workspace = true } candid = { workspace = true } +canlog = { path = "../canlog" } +canlog_derive = { path = "../canlog_derive" } ic-cdk = { workspace = true } ic-test-utilities-load-wasm = { workspace = true } pocket-ic = { workspace = true } +proptest = { workspace = true } serde = { workspace = true } +serde_json = { workspace = true } sol_rpc_client = { path = "../libs/client" } sol_rpc_types = { path = "../libs/types" } diff --git a/integration_tests/tests/tests.rs b/integration_tests/tests/tests.rs index 5b03a8e5..203c7b51 100644 --- a/integration_tests/tests/tests.rs +++ b/integration_tests/tests/tests.rs @@ -43,7 +43,7 @@ mod retrieve_logs_tests { assert_eq!(client.retrieve_logs("DEBUG").await, vec![]); assert_eq!(client.retrieve_logs("INFO").await, vec![]); - // Generate some logs + // Generate some canlog setup .client() .with_caller(setup.controller()) @@ -188,3 +188,236 @@ mod canister_upgrade_tests { .await; } } + +use canlog::LogFilter; +use std::cell::RefCell; + +thread_local! { + static LOG_FILTER: RefCell = RefCell::default(); +} + +mod logging_tests { + use super::*; + use canlog::{log, GetLogFilter, Log, LogEntry, LogPriorityLevels, Sort}; + use canlog_derive::LogPriorityLevels; + use proptest::{prop_assert, proptest}; + use serde::{Deserialize, Serialize}; + + #[derive(Clone, Copy, Serialize, Deserialize, LogPriorityLevels)] + enum TestPriority { + #[log_level(capacity = 1000, name = "INFO_TEST")] + Info, + } + + impl GetLogFilter for TestPriority { + fn get_log_filter() -> LogFilter { + LOG_FILTER.with(|cell| cell.borrow().clone()) + } + } + + fn set_log_filter(filter: LogFilter) { + LOG_FILTER.set(filter); + } + + fn info_log_entry_with_timestamp(timestamp: u64) -> LogEntry { + LogEntry { + timestamp, + priority: TestPriority::Info, + file: String::default(), + line: 0, + message: String::default(), + counter: 0, + } + } + + fn is_ascending(log: &Log) -> bool { + for i in 0..log.entries.len() - 1 { + if log.entries[i].timestamp > log.entries[i + 1].timestamp { + return false; + } + } + true + } + + fn is_descending(log: &Log) -> bool { + for i in 0..log.entries.len() - 1 { + if log.entries[i].timestamp < log.entries[i + 1].timestamp { + return false; + } + } + true + } + + fn get_messages() -> Vec { + canlog::export_logs(TestPriority::Info.get_buffer()) + .into_iter() + .map(|entry| entry.message) + .collect() + } + + proptest! 
{ + #[test] + fn logs_always_fit_in_message( + number_of_entries in 1..100_usize, + entry_size in 1..10000_usize, + max_body_size in 100..10000_usize + ) { + let mut entries: Vec> = vec![]; + for _ in 0..number_of_entries { + entries.push(LogEntry { + timestamp: 0, + priority: TestPriority::Info, + file: String::default(), + line: 0, + message: "1".repeat(entry_size), + counter: 0, + }); + } + let log = Log { entries }; + let truncated_logs_json_len = log.serialize_logs(max_body_size).len(); + prop_assert!(truncated_logs_json_len <= max_body_size); + } + } + + #[test] + fn sorting_order() { + let mut log = Log { entries: vec![] }; + log.entries.push(info_log_entry_with_timestamp(2)); + log.entries.push(info_log_entry_with_timestamp(0)); + log.entries.push(info_log_entry_with_timestamp(1)); + + log.sort_logs(Sort::Ascending); + assert!(is_ascending(&log)); + + log.sort_logs(Sort::Descending); + assert!(is_descending(&log)); + } + + #[test] + fn simple_logs_truncation() { + let mut entries: Vec> = vec![]; + const MAX_BODY_SIZE: usize = 3_000_000; + + for _ in 0..10 { + entries.push(LogEntry { + timestamp: 0, + priority: TestPriority::Info, + file: String::default(), + line: 0, + message: String::default(), + counter: 0, + }); + } + let log = Log { + entries: entries.clone(), + }; + let small_len = serde_json::to_string(&log).unwrap_or_default().len(); + + entries.push(LogEntry { + timestamp: 0, + priority: TestPriority::Info, + file: String::default(), + line: 0, + message: "1".repeat(MAX_BODY_SIZE), + counter: 0, + }); + let log = Log { entries }; + let entries_json = serde_json::to_string(&log).unwrap_or_default(); + assert!(entries_json.len() > MAX_BODY_SIZE); + + let truncated_logs_json = log.serialize_logs(MAX_BODY_SIZE); + + assert_eq!(small_len, truncated_logs_json.len()); + } + + #[test] + fn one_entry_too_big() { + let mut entries: Vec> = vec![]; + const MAX_BODY_SIZE: usize = 3_000_000; + + entries.push(LogEntry { + timestamp: 0, + priority: TestPriority::Info, + file: String::default(), + line: 0, + message: "1".repeat(MAX_BODY_SIZE), + counter: 0, + }); + let log = Log { entries }; + let truncated_logs_json_len = log.serialize_logs(MAX_BODY_SIZE).len(); + assert!(truncated_logs_json_len < MAX_BODY_SIZE); + assert_eq!("{\"entries\":[]}", log.serialize_logs(MAX_BODY_SIZE)); + } + + #[test] + fn should_truncate_last_entry() { + let log_entries = vec![ + info_log_entry_with_timestamp(0), + info_log_entry_with_timestamp(1), + info_log_entry_with_timestamp(2), + ]; + let log_with_2_entries = Log { + entries: { + let mut entries = log_entries.clone(); + entries.pop(); + entries + }, + }; + let log_with_3_entries = Log { + entries: log_entries, + }; + + let serialized_log_with_2_entries = log_with_2_entries.serialize_logs(usize::MAX); + let serialized_log_with_3_entries = + log_with_3_entries.serialize_logs(serialized_log_with_2_entries.len()); + + assert_eq!(serialized_log_with_3_entries, serialized_log_with_2_entries); + } + + #[test] + fn should_show_all() { + set_log_filter(LogFilter::ShowAll); + log!(TestPriority::Info, "ABC"); + log!(TestPriority::Info, "123"); + log!(TestPriority::Info, "!@#"); + assert_eq!(get_messages(), vec!["ABC", "123", "!@#"]); + } + + #[test] + fn should_hide_all() { + set_log_filter(LogFilter::HideAll); + log!(TestPriority::Info, "ABC"); + log!(TestPriority::Info, "123"); + log!(TestPriority::Info, "!@#"); + assert_eq!(get_messages().len(), 0); + } + + #[test] + fn should_show_pattern() { + set_log_filter(LogFilter::ShowPattern("end$".into())); + 
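+        // The filter is applied to the full formatted entry, "INFO_TEST <file>:<line> <message>"
+        // (see `PrintProxySink::append`), so `ShowPattern("end$")` keeps only entries whose
+        // formatted text ends in "end", i.e. only the second message below.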
log!(TestPriority::Info, "message"); + log!(TestPriority::Info, "message end"); + log!(TestPriority::Info, "end message"); + assert_eq!(get_messages(), vec!["message end"]); + } + + #[test] + fn should_hide_pattern_including_message_type() { + set_log_filter(LogFilter::ShowPattern("^INFO_TEST [^ ]* 123".into())); + log!(TestPriority::Info, "123"); + log!(TestPriority::Info, "INFO_TEST 123"); + log!(TestPriority::Info, ""); + log!(TestPriority::Info, "123456"); + assert_eq!(get_messages(), vec!["123", "123456"]); + } + + #[test] + fn should_hide_pattern() { + set_log_filter(LogFilter::HidePattern("[ABC]".into())); + log!(TestPriority::Info, "remove A"); + log!(TestPriority::Info, "...B..."); + log!(TestPriority::Info, "C"); + log!(TestPriority::Info, "message"); + assert_eq!(get_messages(), vec!["message"]); + } +} diff --git a/libs/client/Cargo.toml b/libs/client/Cargo.toml index 8a9042ee..5bc07642 100644 --- a/libs/client/Cargo.toml +++ b/libs/client/Cargo.toml @@ -17,6 +17,6 @@ ic-cdk = { workspace = true } serde = { workspace = true } serde_bytes = { workspace = true } serde_json = { workspace = true } -sol_rpc_logs = { path = "../logs" } +canlog = { path = "../../canlog" } sol_rpc_types = { path = "../types" } sol_rpc_canister = { path = "../../canister" } \ No newline at end of file diff --git a/libs/client/src/lib.rs b/libs/client/src/lib.rs index 7d7e031d..bfabfb57 100644 --- a/libs/client/src/lib.rs +++ b/libs/client/src/lib.rs @@ -12,7 +12,7 @@ use sol_rpc_canister::{ http_types::{HttpRequest, HttpResponse}, logs::Priority, }; -use sol_rpc_logs::{Log, LogEntry}; +use canlog::{Log, LogEntry}; use sol_rpc_types::ProviderId; /// Abstract the canister runtime so that the client code can be reused: @@ -98,11 +98,11 @@ impl SolRpcClient { .unwrap() } - /// Retrieve logs from the SOL RPC canister from the HTTP endpoint. + /// Retrieve canlog from the SOL RPC canister from the HTTP endpoint. pub async fn retrieve_logs(&self, priority: &str) -> Vec> { let request = HttpRequest { method: "".to_string(), - url: format!("/logs?priority={priority}"), + url: format!("/canlog?priority={priority}"), headers: vec![], body: serde_bytes::ByteBuf::new(), }; @@ -112,7 +112,7 @@ impl SolRpcClient { .await .unwrap(); serde_json::from_slice::>(&response.body) - .expect("failed to parse SOL RPC canister logs") + .expect("failed to parse SOL RPC canister canlog") .entries } } diff --git a/libs/logs/README.md b/libs/logs/README.md deleted file mode 100644 index 12204801..00000000 --- a/libs/logs/README.md +++ /dev/null @@ -1,2 +0,0 @@ -# Crate `sol_rpc_logs` - diff --git a/libs/logs/src/tests.rs b/libs/logs/src/tests.rs deleted file mode 100644 index 48671145..00000000 --- a/libs/logs/src/tests.rs +++ /dev/null @@ -1,232 +0,0 @@ -use super::{declare_log_priorities, GetLogFilter, Log, LogEntry, LogPriority}; -use crate::types::{LogFilter, Sort}; -use ic_canister_log::{export, log}; -use proptest::{prop_assert, proptest}; -use std::cell::RefCell; - -thread_local! { - static LOG_FILTER: RefCell = RefCell::default(); -} - -declare_log_priorities! 
{ - pub enum TestPriority { - Info(capacity = 1000, buffer = INFO_TEST) - } -} - -impl GetLogFilter for TestPriority { - fn get_log_filter() -> LogFilter { - LOG_FILTER.with(|cell| cell.borrow().clone()) - } -} - -fn set_log_filter(filter: LogFilter) { - LOG_FILTER.set(filter); -} - -fn info_log_entry_with_timestamp(timestamp: u64) -> LogEntry { - LogEntry { - timestamp, - priority: TestPriority::Info, - file: String::default(), - line: 0, - message: String::default(), - counter: 0, - } -} - -fn is_ascending(log: &Log) -> bool { - for i in 0..log.entries.len() - 1 { - if log.entries[i].timestamp > log.entries[i + 1].timestamp { - return false; - } - } - true -} - -fn is_descending(log: &Log) -> bool { - for i in 0..log.entries.len() - 1 { - if log.entries[i].timestamp < log.entries[i + 1].timestamp { - return false; - } - } - true -} - -fn get_messages() -> Vec { - export(TestPriority::Info.get_buffer()) - .into_iter() - .map(|entry| entry.message) - .collect() -} - -proptest! { - #[test] - fn logs_always_fit_in_message( - number_of_entries in 1..100_usize, - entry_size in 1..10000_usize, - max_body_size in 100..10000_usize - ) { - let mut entries: Vec> = vec![]; - for _ in 0..number_of_entries { - entries.push(LogEntry { - timestamp: 0, - priority: TestPriority::Info, - file: String::default(), - line: 0, - message: "1".repeat(entry_size), - counter: 0, - }); - } - let log = Log { entries }; - let truncated_logs_json_len = log.serialize_logs(max_body_size).len(); - prop_assert!(truncated_logs_json_len <= max_body_size); - } -} - -#[test] -fn sorting_order() { - let mut log = Log { entries: vec![] }; - log.entries.push(info_log_entry_with_timestamp(2)); - log.entries.push(info_log_entry_with_timestamp(0)); - log.entries.push(info_log_entry_with_timestamp(1)); - log.sort_asc(); - assert!(is_ascending(&log)); - - log.sort_desc(); - assert!(is_descending(&log)); - - log.sort_logs(Sort::Ascending); - assert!(is_ascending(&log)); - - log.sort_logs(Sort::Descending); - assert!(is_descending(&log)); -} - -#[test] -fn simple_logs_truncation() { - let mut entries: Vec> = vec![]; - const MAX_BODY_SIZE: usize = 3_000_000; - - for _ in 0..10 { - entries.push(LogEntry { - timestamp: 0, - priority: TestPriority::Info, - file: String::default(), - line: 0, - message: String::default(), - counter: 0, - }); - } - let log = Log { - entries: entries.clone(), - }; - let small_len = serde_json::to_string(&log).unwrap_or_default().len(); - - entries.push(LogEntry { - timestamp: 0, - priority: TestPriority::Info, - file: String::default(), - line: 0, - message: "1".repeat(MAX_BODY_SIZE), - counter: 0, - }); - let log = Log { entries }; - let entries_json = serde_json::to_string(&log).unwrap_or_default(); - assert!(entries_json.len() > MAX_BODY_SIZE); - - let truncated_logs_json = log.serialize_logs(MAX_BODY_SIZE); - - assert_eq!(small_len, truncated_logs_json.len()); -} - -#[test] -fn one_entry_too_big() { - let mut entries: Vec> = vec![]; - const MAX_BODY_SIZE: usize = 3_000_000; - - entries.push(LogEntry { - timestamp: 0, - priority: TestPriority::Info, - file: String::default(), - line: 0, - message: "1".repeat(MAX_BODY_SIZE), - counter: 0, - }); - let log = Log { entries }; - let truncated_logs_json_len = log.serialize_logs(MAX_BODY_SIZE).len(); - assert!(truncated_logs_json_len < MAX_BODY_SIZE); - assert_eq!("{\"entries\":[]}", log.serialize_logs(MAX_BODY_SIZE)); -} - -#[test] -fn should_truncate_last_entry() { - let log_entries = vec![ - info_log_entry_with_timestamp(0), - info_log_entry_with_timestamp(1), 
- info_log_entry_with_timestamp(2), - ]; - let log_with_2_entries = Log { - entries: { - let mut entries = log_entries.clone(); - entries.pop(); - entries - }, - }; - let log_with_3_entries = Log { - entries: log_entries, - }; - - let serialized_log_with_2_entries = log_with_2_entries.serialize_logs(usize::MAX); - let serialized_log_with_3_entries = - log_with_3_entries.serialize_logs(serialized_log_with_2_entries.len()); - - assert_eq!(serialized_log_with_3_entries, serialized_log_with_2_entries); -} - -#[test] -fn should_show_all() { - set_log_filter(LogFilter::ShowAll); - log!(INFO_TEST, "ABC"); - log!(INFO_TEST, "123"); - log!(INFO_TEST, "!@#"); - assert_eq!(get_messages(), vec!["ABC", "123", "!@#"]); -} - -#[test] -fn should_hide_all() { - set_log_filter(LogFilter::HideAll); - log!(INFO_TEST, "ABC"); - log!(INFO_TEST, "123"); - log!(INFO_TEST, "!@#"); - assert_eq!(get_messages().len(), 0); -} - -#[test] -fn should_show_pattern() { - set_log_filter(LogFilter::ShowPattern("end$".into())); - log!(INFO_TEST, "message"); - log!(INFO_TEST, "message end"); - log!(INFO_TEST, "end message"); - assert_eq!(get_messages(), vec!["message end"]); -} - -#[test] -fn should_hide_pattern_including_message_type() { - set_log_filter(LogFilter::ShowPattern("^INFO_TEST [^ ]* 123".into())); - log!(INFO_TEST, "123"); - log!(INFO_TEST, "INFO_TEST 123"); - log!(INFO_TEST, ""); - log!(INFO_TEST, "123456"); - assert_eq!(get_messages(), vec!["123", "123456"]); -} - -#[test] -fn should_hide_pattern() { - set_log_filter(LogFilter::HidePattern("[ABC]".into())); - log!(INFO_TEST, "remove A"); - log!(INFO_TEST, "...B..."); - log!(INFO_TEST, "C"); - log!(INFO_TEST, "message"); - assert_eq!(get_messages(), vec!["message"]); -} diff --git a/libs/types/Cargo.toml b/libs/types/Cargo.toml index 32baecb8..40798c75 100644 --- a/libs/types/Cargo.toml +++ b/libs/types/Cargo.toml @@ -15,6 +15,6 @@ candid = { workspace = true } ic-cdk = { workspace = true } regex = { workspace = true } serde = { workspace = true } -sol_rpc_logs = { path = "../logs" } +canlog = { path = "../../canlog" } strum = { workspace = true } url = { workspace = true } diff --git a/libs/types/src/lifecycle/mod.rs b/libs/types/src/lifecycle/mod.rs index 7433b0d6..0afe3172 100644 --- a/libs/types/src/lifecycle/mod.rs +++ b/libs/types/src/lifecycle/mod.rs @@ -1,7 +1,7 @@ use crate::OverrideProvider; use candid::{CandidType, Principal}; use serde::Deserialize; -use sol_rpc_logs::LogFilter; +use canlog::LogFilter; /// The installation args for the Solana RPC canister #[derive(Clone, Debug, Default, CandidType, Deserialize)] diff --git a/rust-toolchain.toml b/rust-toolchain.toml index bd4541d2..bcb3be24 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "1.81.0" +channel = "1.85.0" components = ["rustfmt", "clippy"] targets = ["wasm32-unknown-unknown"] From 4fb3226e4702d4ed91ff32049d448064f4361a2e Mon Sep 17 00:00:00 2001 From: Louis Pahlavi Date: Thu, 27 Feb 2025 16:57:39 +0100 Subject: [PATCH 12/20] address review feedback --- Cargo.lock | 25 ++-- Cargo.toml | 5 +- canister/Cargo.toml | 5 +- canister/src/lifecycle/mod.rs | 2 +- canister/src/logs/mod.rs | 6 +- canister/src/main.rs | 4 +- canlog/Cargo.toml | 6 +- canlog/src/lib.rs | 125 ++++++++-------- canlog/src/tests.rs | 226 +++++++++++++++++++++++++++++ canlog_derive/Cargo.toml | 2 - canlog_derive/src/lib.rs | 56 ++------ integration_tests/tests/tests.rs | 235 +------------------------------ libs/client/src/lib.rs | 6 +- rust-toolchain.toml | 2 +- 14 files 
changed, 325 insertions(+), 380 deletions(-) create mode 100644 canlog/src/tests.rs diff --git a/Cargo.lock b/Cargo.lock index 9df5d81d..1423c0ec 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "addr2line" @@ -135,9 +135,9 @@ checksum = "72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" [[package]] name = "base64ct" -version = "1.7.0" +version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c103cbbedac994e292597ab79342dbd5b306a362045095db54917d92a9fdfd92" +checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" [[package]] name = "basic_solana" @@ -460,9 +460,9 @@ name = "canlog" version = "0.1.0" dependencies = [ "candid", + "canlog_derive", "ic-canister-log", "ic-cdk", - "paste", "proptest", "regex", "serde", @@ -473,10 +473,8 @@ dependencies = [ name = "canlog_derive" version = "0.1.0" dependencies = [ - "canlog", "darling", "ic-canister-log", - "paste", "proc-macro2", "quote", "syn 2.0.98", @@ -1747,9 +1745,9 @@ checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "litemap" -version = "0.7.4" +version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4ee93343901ab17bd981295f2cf0026d4ad018c7c31ba84549a4ddbb47a45104" +checksum = "23fb14cb19457329c82206317a5663005a4d404783dc74f4252769b0d5f42856" [[package]] name = "lock_api" @@ -2941,19 +2939,16 @@ dependencies = [ "candid", "candid_parser", "canlog", - "canlog_derive", "ciborium", "const_format", "hex", "ic-cdk", "ic-stable-structures", - "paste", "proptest", "regex", "serde", "serde_bytes", "sol_rpc_types", - "strum 0.27.1", "url", "zeroize", ] @@ -4764,18 +4759,18 @@ dependencies = [ [[package]] name = "zerofrom" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cff3ee08c995dee1859d998dea82f7374f2826091dd9cd47def953cae446cd2e" +checksum = "50cc42e0333e05660c3587f3bf9d0478688e15d870fab3346451ce7f8c9fbea5" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "595eed982f7d355beb85837f651fa22e90b3c044842dc7f2c2842c086f295808" +checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", diff --git a/Cargo.toml b/Cargo.toml index b6efb3ea..f4ab8952 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -2,10 +2,10 @@ resolver = "2" members = [ "canister", "integration_tests", - "libs/client", - "libs/types", "canlog", "canlog_derive", + "libs/client", + "libs/types", "examples/basic_solana"] [workspace.package] @@ -41,7 +41,6 @@ ic-cdk = "0.17.1" ic-ed25519 = "0.1.0" ic-stable-structures = "0.6.7" ic-test-utilities-load-wasm = { git = "https://github.com/dfinity/ic", tag = "release-2025-01-23_03-04-base" } -paste = "1.0.15" pocket-ic = "6.0.0" proptest = "1.6.0" regex = "1.11.1" diff --git a/canister/Cargo.toml b/canister/Cargo.toml index 8764ee81..cf87b1c7 100644 --- a/canister/Cargo.toml +++ b/canister/Cargo.toml @@ -14,8 +14,7 @@ path = "src/main.rs" [dependencies] candid = { workspace = true } -canlog = { path = "../canlog" } -canlog_derive = { path = "../canlog_derive" } +canlog = { path = "../canlog", features = ["derive"] } ciborium = { workspace = true } const_format = { workspace = true 
} hex = { workspace = true } @@ -25,10 +24,8 @@ sol_rpc_types = { path = "../libs/types" } regex = { workspace = true } serde = { workspace = true } serde_bytes = { workspace = true } -strum = { workspace = true } url = { workspace = true } zeroize = { workspace = true } -paste = "1.0.15" [dev-dependencies] candid_parser = { workspace = true } diff --git a/canister/src/lifecycle/mod.rs b/canister/src/lifecycle/mod.rs index bf25a74d..39e5441c 100644 --- a/canister/src/lifecycle/mod.rs +++ b/canister/src/lifecycle/mod.rs @@ -2,7 +2,7 @@ use crate::{ logs::Priority, state::{init_state, mutate_state, State}, }; -use canlog::{log, LogPriorityLevels}; +use canlog::log; use sol_rpc_types::InstallArgs; pub fn init(args: InstallArgs) { diff --git a/canister/src/logs/mod.rs b/canister/src/logs/mod.rs index 5305817d..4a608a7f 100644 --- a/canister/src/logs/mod.rs +++ b/canister/src/logs/mod.rs @@ -1,9 +1,9 @@ use crate::state::read_state; -use canlog::{GetLogFilter, LogFilter}; -use std::str::FromStr; +use canlog::{GetLogFilter, LogFilter, LogPriorityLevels}; use serde::{Deserialize, Serialize}; +use std::str::FromStr; -#[derive(canlog_derive::LogPriorityLevels, Serialize, Deserialize, PartialEq, Debug, Copy, Clone)] +#[derive(LogPriorityLevels, Serialize, Deserialize, PartialEq, Debug, Copy, Clone)] pub enum Priority { #[log_level(capacity = 1000, name = "INFO")] Info, diff --git a/canister/src/main.rs b/canister/src/main.rs index d232e7c1..5027043f 100644 --- a/canister/src/main.rs +++ b/canister/src/main.rs @@ -1,5 +1,5 @@ use candid::candid_method; -use canlog::{log, Log, LogPriorityLevels, Sort}; +use canlog::{log, Log, Sort}; use ic_cdk::{ api::is_controller, {query, update}, @@ -71,7 +71,7 @@ async fn update_api_keys(api_keys: Vec<(ProviderId, Option)>) { #[query(hidden = true)] fn http_request(request: http_types::HttpRequest) -> http_types::HttpResponse { match request.path() { - "/canlog" => { + "/log" => { let max_skip_timestamp = match request.raw_query_param("time") { Some(arg) => match u64::from_str(arg) { Ok(value) => value, diff --git a/canlog/Cargo.toml b/canlog/Cargo.toml index 2309bb48..ff0e982c 100644 --- a/canlog/Cargo.toml +++ b/canlog/Cargo.toml @@ -12,12 +12,16 @@ include = ["src", "Cargo.toml", "CHANGELOG.md", "LICENSE", "README.md"] [dependencies] candid = { workspace = true } +canlog_derive = { path = "../canlog_derive", optional = true } ic-canister-log = { workspace = true } ic-cdk = { workspace = true } -paste = { workspace = true } regex = { workspace = true } serde = { workspace = true } serde_json = { workspace = true } [dev-dependencies] proptest = { workspace = true } +canlog_derive = { path = "../canlog_derive" } + +[features] +derive = ["dep:canlog_derive"] diff --git a/canlog/src/lib.rs b/canlog/src/lib.rs index 5947df01..ed83fb8e 100644 --- a/canlog/src/lib.rs +++ b/canlog/src/lib.rs @@ -1,81 +1,76 @@ -//! Crate for managing canister canlog +//! This crate extends [`ic_canister_log`] to provide native support for log priority levels, +//! filtering and sorting. +//! +//! The main functionality is provided by the [`LogPriorityLevels`] and [`GetLogFilter`] traits +//! as well as the [`log`] macro. +//! +//! Custom log priority levels may be defined by declaring an enum and implementing the +//! [`LogPriorityLevels`] trait for it, usually through the [`derive`] annotation available with +//! the `derive` feature of [`canlog`]. +//! +//! Additionally, log filtering may be achieved by implementing the [`GetLogFilter`] trait on +//! 
the enum defining the log priorities. +//! +//! * Example: +//! ```rust +//! # #[cfg(feature="derive")] +//! # mod wrapper_module { +//! use canlog::{GetLogFilter, LogFilter, LogPriorityLevels, log}; +//! +//! #[derive(LogPriorityLevels)] +//! enum LogPriority { +//! #[log_level(capacity = 100, name = "INFO")] +//! Info, +//! #[log_level(capacity = 500, name = "DEBUG")] +//! Debug, +//! } +//! +//! impl GetLogFilter for LogPriority { +//! fn get_log_filter() -> LogFilter { +//! LogFilter::ShowAll +//! } +//! } +//! +//! fn main() { +//! log!(LogPriority::Info, "Some rather important message."); +//! log!(LogPriority::Debug, "Some less important message."); +//! } +//! # } +//! ``` +//! +//! **Expected Output:** +//! ```text +//! 2025-02-26 08:27:10 UTC: [Canister lxzze-o7777-77777-aaaaa-cai] INFO main.rs:13 Some rather important message. +//! 2025-02-26 08:27:10 UTC: [Canister lxzze-o7777-77777-aaaaa-cai] DEBUG main.rs:14 Some less important message. +//! ``` #![forbid(unsafe_code)] #![forbid(missing_docs)] +#[cfg(test)] +mod tests; mod types; +extern crate self as canlog; + pub use crate::types::{LogFilter, Sort}; -pub use ic_canister_log::{declare_log_buffer, export as export_logs, GlobalBuffer, Sink, log as raw_log}; +#[cfg(any(feature = "derive", test))] +pub use canlog_derive::*; +pub use ic_canister_log::{ + declare_log_buffer, export as export_logs, log as raw_log, GlobalBuffer, Sink, +}; use serde::{Deserialize, Serialize}; /// Wrapper for the [`ic_canister_log::log`] macro that allows logging /// for a given variant of an enum implementing the [`LogPriorityLevels`] -/// trait. -/// -/// **Usage Example:** -/// ```rust -/// use canlog::{GetLogFilter, LogFilter, LogPriorityLevels, log, declare_log_buffer, PrintProxySink}; -/// -/// enum LogPriority { -/// Info, -/// Debug, -/// } -/// -/// declare_log_buffer!(name = INFO_BUF, capacity = 100); -/// declare_log_buffer!(name = DEBUG_BUF, capacity = 500); -/// -/// const INFO: PrintProxySink = PrintProxySink(&LogPriority::Info, &INFO_BUF); -/// const DEBUG: PrintProxySink = PrintProxySink(&LogPriority::Info, &DEBUG_BUF); -/// -/// impl LogPriorityLevels for LogPriority { -/// fn get_buffer(&self) -> &'static canlog::GlobalBuffer { -/// match self { -/// Self::Info => &INFO_BUF, -/// Self::Debug => &DEBUG_BUF, -/// } -/// } -/// -/// fn get_sink(&self) -> &impl canlog::Sink { -/// match self { -/// Self::Info => &INFO, -/// Self::Debug => &DEBUG, -/// } -/// } -/// -/// fn display_name(&self) -> &'static str { -/// match self { -/// Self::Info => "INFO", -/// Self::Debug => "DEBUG", -/// } -/// } -/// -/// fn get_priorities() -> &'static [Self] { -/// &[Self::Info, Self::Debug] -/// } -/// } -/// -/// impl GetLogFilter for LogPriority { -/// fn get_log_filter() -> LogFilter { -/// LogFilter::ShowAll -/// } -/// } -/// -/// fn main() { -/// log!(LogPriority::Info, "Some rather important message."); -/// log!(LogPriority::Debug, "Some less important message."); -/// } -/// ``` -/// -/// **Expected Output:** -/// ```text -/// 2025-02-26 08:27:10 UTC: [Canister lxzze-o7777-77777-aaaaa-cai] INFO main.rs:13 Some rather important message. -/// 2025-02-26 08:27:10 UTC: [Canister lxzze-o7777-77777-aaaaa-cai] DEBUG main.rs:14 Some less important message. -/// ``` +/// trait. See the example in the crate documentation. #[macro_export] macro_rules! 
log { ($enum_variant:expr, $($args:tt)*) => { - canlog::raw_log!($enum_variant.get_sink(), $($args)*); + { + use ::canlog::LogPriorityLevels; + ::canlog::raw_log!($enum_variant.get_sink(), $($args)*); + } }; } @@ -159,7 +154,7 @@ where .for_each(|priority| self.push_logs(*priority)); } - /// Serialize the canlog contained in `entries` into a JSON string. + /// Serialize the logs contained in `entries` into a JSON string. /// /// If the resulting string is larger than `max_body_size` bytes, /// truncate `entries` so the resulting serialized JSON string diff --git a/canlog/src/tests.rs b/canlog/src/tests.rs new file mode 100644 index 00000000..c31f20c7 --- /dev/null +++ b/canlog/src/tests.rs @@ -0,0 +1,226 @@ +use crate::{log, GetLogFilter, Log, LogEntry, LogFilter, LogPriorityLevels, Sort}; +use proptest::{prop_assert, proptest}; +use serde::{Deserialize, Serialize}; +use std::cell::RefCell; + +thread_local! { + static LOG_FILTER: RefCell = RefCell::default(); +} + +#[derive(Clone, Copy, Serialize, Deserialize, LogPriorityLevels)] +enum TestPriority { + #[log_level(capacity = 1000, name = "INFO_TEST")] + Info, +} + +impl GetLogFilter for TestPriority { + fn get_log_filter() -> LogFilter { + LOG_FILTER.with(|cell| cell.borrow().clone()) + } +} + +fn set_log_filter(filter: LogFilter) { + LOG_FILTER.set(filter); +} + +fn info_log_entry_with_timestamp(timestamp: u64) -> LogEntry { + LogEntry { + timestamp, + priority: TestPriority::Info, + file: String::default(), + line: 0, + message: String::default(), + counter: 0, + } +} + +fn is_ascending(log: &Log) -> bool { + for i in 0..log.entries.len() - 1 { + if log.entries[i].timestamp > log.entries[i + 1].timestamp { + return false; + } + } + true +} + +fn is_descending(log: &Log) -> bool { + for i in 0..log.entries.len() - 1 { + if log.entries[i].timestamp < log.entries[i + 1].timestamp { + return false; + } + } + true +} + +fn get_messages() -> Vec { + canlog::export_logs(TestPriority::Info.get_buffer()) + .into_iter() + .map(|entry| entry.message) + .collect() +} + +proptest! 
{ + #[test] + fn logs_always_fit_in_message( + number_of_entries in 1..100_usize, + entry_size in 1..10000_usize, + max_body_size in 100..10000_usize + ) { + let mut entries: Vec> = vec![]; + for _ in 0..number_of_entries { + entries.push(LogEntry { + timestamp: 0, + priority: TestPriority::Info, + file: String::default(), + line: 0, + message: "1".repeat(entry_size), + counter: 0, + }); + } + let log = Log { entries }; + let truncated_logs_json_len = log.serialize_logs(max_body_size).len(); + prop_assert!(truncated_logs_json_len <= max_body_size); + } +} + +#[test] +fn sorting_order() { + let mut log = Log { entries: vec![] }; + log.entries.push(info_log_entry_with_timestamp(2)); + log.entries.push(info_log_entry_with_timestamp(0)); + log.entries.push(info_log_entry_with_timestamp(1)); + + log.sort_logs(Sort::Ascending); + assert!(is_ascending(&log)); + + log.sort_logs(Sort::Descending); + assert!(is_descending(&log)); +} + +#[test] +fn simple_logs_truncation() { + let mut entries: Vec> = vec![]; + const MAX_BODY_SIZE: usize = 3_000_000; + + for _ in 0..10 { + entries.push(LogEntry { + timestamp: 0, + priority: TestPriority::Info, + file: String::default(), + line: 0, + message: String::default(), + counter: 0, + }); + } + let log = Log { + entries: entries.clone(), + }; + let small_len = serde_json::to_string(&log).unwrap_or_default().len(); + + entries.push(LogEntry { + timestamp: 0, + priority: TestPriority::Info, + file: String::default(), + line: 0, + message: "1".repeat(MAX_BODY_SIZE), + counter: 0, + }); + let log = Log { entries }; + let entries_json = serde_json::to_string(&log).unwrap_or_default(); + assert!(entries_json.len() > MAX_BODY_SIZE); + + let truncated_logs_json = log.serialize_logs(MAX_BODY_SIZE); + + assert_eq!(small_len, truncated_logs_json.len()); +} + +#[test] +fn one_entry_too_big() { + let mut entries: Vec> = vec![]; + const MAX_BODY_SIZE: usize = 3_000_000; + + entries.push(LogEntry { + timestamp: 0, + priority: TestPriority::Info, + file: String::default(), + line: 0, + message: "1".repeat(MAX_BODY_SIZE), + counter: 0, + }); + let log = Log { entries }; + let truncated_logs_json_len = log.serialize_logs(MAX_BODY_SIZE).len(); + assert!(truncated_logs_json_len < MAX_BODY_SIZE); + assert_eq!("{\"entries\":[]}", log.serialize_logs(MAX_BODY_SIZE)); +} + +#[test] +fn should_truncate_last_entry() { + let log_entries = vec![ + info_log_entry_with_timestamp(0), + info_log_entry_with_timestamp(1), + info_log_entry_with_timestamp(2), + ]; + let log_with_2_entries = Log { + entries: { + let mut entries = log_entries.clone(); + entries.pop(); + entries + }, + }; + let log_with_3_entries = Log { + entries: log_entries, + }; + + let serialized_log_with_2_entries = log_with_2_entries.serialize_logs(usize::MAX); + let serialized_log_with_3_entries = + log_with_3_entries.serialize_logs(serialized_log_with_2_entries.len()); + + assert_eq!(serialized_log_with_3_entries, serialized_log_with_2_entries); +} + +#[test] +fn should_show_all() { + set_log_filter(LogFilter::ShowAll); + log!(TestPriority::Info, "ABC"); + log!(TestPriority::Info, "123"); + log!(TestPriority::Info, "!@#"); + assert_eq!(get_messages(), vec!["ABC", "123", "!@#"]); +} + +#[test] +fn should_hide_all() { + set_log_filter(LogFilter::HideAll); + log!(TestPriority::Info, "ABC"); + log!(TestPriority::Info, "123"); + log!(TestPriority::Info, "!@#"); + assert_eq!(get_messages().len(), 0); +} + +#[test] +fn should_show_pattern() { + set_log_filter(LogFilter::ShowPattern("end$".into())); + log!(TestPriority::Info, 
"message"); + log!(TestPriority::Info, "message end"); + log!(TestPriority::Info, "end message"); + assert_eq!(get_messages(), vec!["message end"]); +} + +#[test] +fn should_hide_pattern_including_message_type() { + set_log_filter(LogFilter::ShowPattern("^INFO_TEST [^ ]* 123".into())); + log!(TestPriority::Info, "123"); + log!(TestPriority::Info, "INFO_TEST 123"); + log!(TestPriority::Info, ""); + log!(TestPriority::Info, "123456"); + assert_eq!(get_messages(), vec!["123", "123456"]); +} + +#[test] +fn should_hide_pattern() { + set_log_filter(LogFilter::HidePattern("[ABC]".into())); + log!(TestPriority::Info, "remove A"); + log!(TestPriority::Info, "...B..."); + log!(TestPriority::Info, "C"); + log!(TestPriority::Info, "message"); + assert_eq!(get_messages(), vec!["message"]); +} diff --git a/canlog_derive/Cargo.toml b/canlog_derive/Cargo.toml index cc2fc410..b316a0a3 100644 --- a/canlog_derive/Cargo.toml +++ b/canlog_derive/Cargo.toml @@ -11,9 +11,7 @@ readme = "README.md" include = ["src", "Cargo.toml", "CHANGELOG.md", "LICENSE", "README.md"] [dependencies] -canlog = { path = "../canlog" } ic-canister-log = { workspace = true } -paste = { workspace = true } syn = { version = "2.0.98", features = ["derive"] } quote = "1.0.38" diff --git a/canlog_derive/src/lib.rs b/canlog_derive/src/lib.rs index 7108b001..4a960e23 100644 --- a/canlog_derive/src/lib.rs +++ b/canlog_derive/src/lib.rs @@ -1,4 +1,4 @@ -//! Crate for managing canister canlog +//! Procedural macros for the canlog crate #![forbid(unsafe_code)] #![forbid(missing_docs)] @@ -12,48 +12,12 @@ use syn::{parse_macro_input, Data, DataEnum, DeriveInput}; /// A procedural macro to implement [`LogPriorityLevels`](canlog::LogPriorityLevels) for an enum. /// /// This macro expects the variants to be annotated with `#[log_level(capacity = N, name = "NAME")]` -/// where `N` is an integer representing buffer capacity and `"NAME"` is a string display +/// where `N` is an integer representing buffer capacity and `"NAME"` is a string display /// representation for the corresponding log level. -/// -/// The enum annotated with `#[derive(LogPriorityLevels)]` must also implement the -/// [`Serialize`](serde::Serialize), [`Deserialize`](serde::Deserialize), -/// [`Clone`](core::clone::Clone) and [`Copy`](core::marker::Copy) traits -/// -/// **Generated Code:** -/// 1. Declares a [`GlobalBuffer`](ic_canister_log::GlobalBuffer) and -/// [`Sink`](ic_canister_log::Sink) constant for each variant. -/// 2. Implements the [LogPriorityLevels](canlog::LogPriorityLevels) trait for the enum. -/// -/// **Usage Example:** -/// ```rust -/// use canlog::{GetLogFilter, LogFilter, LogPriorityLevels, log}; -/// use canlog_derive::LogPriorityLevels; -/// -/// #[derive(LogPriorityLevels)] -/// enum LogPriority { -/// #[log_level(capacity = 100, name = "INFO")] -/// Info, -/// #[log_level(capacity = 500, name = "DEBUG")] -/// Debug, -/// } /// -/// impl GetLogFilter for LogPriority { -/// fn get_log_filter() -> LogFilter { -/// LogFilter::ShowAll -/// } -/// } -/// -/// fn main() { -/// log!(LogPriority::Info, "Some rather important message."); -/// log!(LogPriority::Debug, "Some less important message."); -/// } -/// ``` -/// -/// **Expected Output:** -/// ```text -/// 2025-02-26 08:27:10 UTC: [Canister lxzze-o7777-77777-aaaaa-cai] INFO main.rs:13 Some rather important message. -/// 2025-02-26 08:27:10 UTC: [Canister lxzze-o7777-77777-aaaaa-cai] DEBUG main.rs:14 Some less important message. 
-/// ``` +/// The enum annotated with `#[derive(LogPriorityLevels)]` must also implement the +/// [`Serialize`](serde::Serialize), [`Deserialize`](serde::Deserialize), +/// [`Clone`](core::clone::Clone) and [`Copy`](core::marker::Copy) traits #[proc_macro_derive(LogPriorityLevels, attributes(log_level))] pub fn derive_log_priority(input: TokenStream) -> TokenStream { let input = parse_macro_input!(input as DeriveInput); @@ -74,8 +38,8 @@ pub fn derive_log_priority(input: TokenStream) -> TokenStream { let capacity = info.capacity; quote! { - canlog::declare_log_buffer!(name = #buffer_ident, capacity = #capacity); - pub const #sink_ident: canlog::PrintProxySink<#enum_ident> = canlog::PrintProxySink(&#enum_ident::#variant_ident, &#buffer_ident); + ::canlog::declare_log_buffer!(name = #buffer_ident, capacity = #capacity); + pub const #sink_ident: ::canlog::PrintProxySink<#enum_ident> = ::canlog::PrintProxySink(&#enum_ident::#variant_ident, &#buffer_ident); } }); @@ -110,14 +74,14 @@ pub fn derive_log_priority(input: TokenStream) -> TokenStream { let trait_impl = quote! { #(#buffer_declarations)* - impl canlog::LogPriorityLevels for #enum_ident { - fn get_buffer(&self) -> &'static canlog::GlobalBuffer { + impl ::canlog::LogPriorityLevels for #enum_ident { + fn get_buffer(&self) -> &'static ::canlog::GlobalBuffer { match self { #(#buffer_match_arms)* } } - fn get_sink(&self) -> &impl canlog::Sink { + fn get_sink(&self) -> &impl ::canlog::Sink { match self { #(#sink_match_arms)* } diff --git a/integration_tests/tests/tests.rs b/integration_tests/tests/tests.rs index 203c7b51..fe45b3d5 100644 --- a/integration_tests/tests/tests.rs +++ b/integration_tests/tests/tests.rs @@ -43,7 +43,7 @@ mod retrieve_logs_tests { assert_eq!(client.retrieve_logs("DEBUG").await, vec![]); assert_eq!(client.retrieve_logs("INFO").await, vec![]); - // Generate some canlog + // Generate some log setup .client() .with_caller(setup.controller()) @@ -188,236 +188,3 @@ mod canister_upgrade_tests { .await; } } - -use canlog::LogFilter; -use std::cell::RefCell; - -thread_local! { - static LOG_FILTER: RefCell = RefCell::default(); -} - -mod logging_tests { - use super::*; - use canlog::{log, GetLogFilter, Log, LogEntry, LogPriorityLevels, Sort}; - use canlog_derive::LogPriorityLevels; - use proptest::{prop_assert, proptest}; - use serde::{Deserialize, Serialize}; - - #[derive(Clone, Copy, Serialize, Deserialize, LogPriorityLevels)] - enum TestPriority { - #[log_level(capacity = 1000, name = "INFO_TEST")] - Info, - } - - impl GetLogFilter for TestPriority { - fn get_log_filter() -> LogFilter { - LOG_FILTER.with(|cell| cell.borrow().clone()) - } - } - - fn set_log_filter(filter: LogFilter) { - LOG_FILTER.set(filter); - } - - fn info_log_entry_with_timestamp(timestamp: u64) -> LogEntry { - LogEntry { - timestamp, - priority: TestPriority::Info, - file: String::default(), - line: 0, - message: String::default(), - counter: 0, - } - } - - fn is_ascending(log: &Log) -> bool { - for i in 0..log.entries.len() - 1 { - if log.entries[i].timestamp > log.entries[i + 1].timestamp { - return false; - } - } - true - } - - fn is_descending(log: &Log) -> bool { - for i in 0..log.entries.len() - 1 { - if log.entries[i].timestamp < log.entries[i + 1].timestamp { - return false; - } - } - true - } - - fn get_messages() -> Vec { - canlog::export_logs(TestPriority::Info.get_buffer()) - .into_iter() - .map(|entry| entry.message) - .collect() - } - - proptest! 
{ - #[test] - fn logs_always_fit_in_message( - number_of_entries in 1..100_usize, - entry_size in 1..10000_usize, - max_body_size in 100..10000_usize - ) { - let mut entries: Vec> = vec![]; - for _ in 0..number_of_entries { - entries.push(LogEntry { - timestamp: 0, - priority: TestPriority::Info, - file: String::default(), - line: 0, - message: "1".repeat(entry_size), - counter: 0, - }); - } - let log = Log { entries }; - let truncated_logs_json_len = log.serialize_logs(max_body_size).len(); - prop_assert!(truncated_logs_json_len <= max_body_size); - } - } - - #[test] - fn sorting_order() { - let mut log = Log { entries: vec![] }; - log.entries.push(info_log_entry_with_timestamp(2)); - log.entries.push(info_log_entry_with_timestamp(0)); - log.entries.push(info_log_entry_with_timestamp(1)); - - log.sort_logs(Sort::Ascending); - assert!(is_ascending(&log)); - - log.sort_logs(Sort::Descending); - assert!(is_descending(&log)); - } - - #[test] - fn simple_logs_truncation() { - let mut entries: Vec> = vec![]; - const MAX_BODY_SIZE: usize = 3_000_000; - - for _ in 0..10 { - entries.push(LogEntry { - timestamp: 0, - priority: TestPriority::Info, - file: String::default(), - line: 0, - message: String::default(), - counter: 0, - }); - } - let log = Log { - entries: entries.clone(), - }; - let small_len = serde_json::to_string(&log).unwrap_or_default().len(); - - entries.push(LogEntry { - timestamp: 0, - priority: TestPriority::Info, - file: String::default(), - line: 0, - message: "1".repeat(MAX_BODY_SIZE), - counter: 0, - }); - let log = Log { entries }; - let entries_json = serde_json::to_string(&log).unwrap_or_default(); - assert!(entries_json.len() > MAX_BODY_SIZE); - - let truncated_logs_json = log.serialize_logs(MAX_BODY_SIZE); - - assert_eq!(small_len, truncated_logs_json.len()); - } - - #[test] - fn one_entry_too_big() { - let mut entries: Vec> = vec![]; - const MAX_BODY_SIZE: usize = 3_000_000; - - entries.push(LogEntry { - timestamp: 0, - priority: TestPriority::Info, - file: String::default(), - line: 0, - message: "1".repeat(MAX_BODY_SIZE), - counter: 0, - }); - let log = Log { entries }; - let truncated_logs_json_len = log.serialize_logs(MAX_BODY_SIZE).len(); - assert!(truncated_logs_json_len < MAX_BODY_SIZE); - assert_eq!("{\"entries\":[]}", log.serialize_logs(MAX_BODY_SIZE)); - } - - #[test] - fn should_truncate_last_entry() { - let log_entries = vec![ - info_log_entry_with_timestamp(0), - info_log_entry_with_timestamp(1), - info_log_entry_with_timestamp(2), - ]; - let log_with_2_entries = Log { - entries: { - let mut entries = log_entries.clone(); - entries.pop(); - entries - }, - }; - let log_with_3_entries = Log { - entries: log_entries, - }; - - let serialized_log_with_2_entries = log_with_2_entries.serialize_logs(usize::MAX); - let serialized_log_with_3_entries = - log_with_3_entries.serialize_logs(serialized_log_with_2_entries.len()); - - assert_eq!(serialized_log_with_3_entries, serialized_log_with_2_entries); - } - - #[test] - fn should_show_all() { - set_log_filter(LogFilter::ShowAll); - log!(TestPriority::Info, "ABC"); - log!(TestPriority::Info, "123"); - log!(TestPriority::Info, "!@#"); - assert_eq!(get_messages(), vec!["ABC", "123", "!@#"]); - } - - #[test] - fn should_hide_all() { - set_log_filter(LogFilter::HideAll); - log!(TestPriority::Info, "ABC"); - log!(TestPriority::Info, "123"); - log!(TestPriority::Info, "!@#"); - assert_eq!(get_messages().len(), 0); - } - - #[test] - fn should_show_pattern() { - set_log_filter(LogFilter::ShowPattern("end$".into())); - 
log!(TestPriority::Info, "message");
-        log!(TestPriority::Info, "message end");
-        log!(TestPriority::Info, "end message");
-        assert_eq!(get_messages(), vec!["message end"]);
-    }
-
-    #[test]
-    fn should_hide_pattern_including_message_type() {
-        set_log_filter(LogFilter::ShowPattern("^INFO_TEST [^ ]* 123".into()));
-        log!(TestPriority::Info, "123");
-        log!(TestPriority::Info, "INFO_TEST 123");
-        log!(TestPriority::Info, "");
-        log!(TestPriority::Info, "123456");
-        assert_eq!(get_messages(), vec!["123", "123456"]);
-    }
-
-    #[test]
-    fn should_hide_pattern() {
-        set_log_filter(LogFilter::HidePattern("[ABC]".into()));
-        log!(TestPriority::Info, "remove A");
-        log!(TestPriority::Info, "...B...");
-        log!(TestPriority::Info, "C");
-        log!(TestPriority::Info, "message");
-        assert_eq!(get_messages(), vec!["message"]);
-    }
-}
diff --git a/libs/client/src/lib.rs b/libs/client/src/lib.rs
index bfabfb57..f9e1dd28 100644
--- a/libs/client/src/lib.rs
+++ b/libs/client/src/lib.rs
@@ -101,8 +101,8 @@ impl<R: Runtime> SolRpcClient<R> {
     /// Retrieve canlog from the SOL RPC canister from the HTTP endpoint.
     pub async fn retrieve_logs(&self, priority: &str) -> Vec<LogEntry<Priority>> {
         let request = HttpRequest {
-            method: "".to_string(),
-            url: format!("/canlog?priority={priority}"),
+            method: "POST".to_string(),
+            url: format!("/log?priority={priority}"),
             headers: vec![],
             body: serde_bytes::ByteBuf::new(),
         };
@@ -112,7 +112,7 @@ impl<R: Runtime> SolRpcClient<R> {
             .await
             .unwrap();
         serde_json::from_slice::<Log<Priority>>(&response.body)
-            .expect("failed to parse SOL RPC canister canlog")
+            .expect("failed to parse SOL RPC canister log")
             .entries
     }
 }
diff --git a/rust-toolchain.toml b/rust-toolchain.toml
index bcb3be24..bd4541d2 100644
--- a/rust-toolchain.toml
+++ b/rust-toolchain.toml
@@ -1,4 +1,4 @@
 [toolchain]
-channel = "1.85.0"
+channel = "1.81.0"
 components = ["rustfmt", "clippy"]
 targets = ["wasm32-unknown-unknown"]
From f364b5384498a27924c8560b6713cb3314876302 Mon Sep 17 00:00:00 2001
From: Louis Pahlavi
Date: Thu, 27 Feb 2025 17:00:10 +0100
Subject: [PATCH 13/20] remove unnecessary integration test dependencies

---
 Cargo.lock                   | 4 ----
 integration_tests/Cargo.toml | 4 ----
 2 files changed, 8 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 1423c0ec..910885c6 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2974,14 +2974,10 @@ version = "0.1.0"
 dependencies = [
  "async-trait",
  "candid",
- "canlog",
- "canlog_derive",
  "ic-cdk",
  "ic-test-utilities-load-wasm",
  "pocket-ic",
- "proptest",
  "serde",
- "serde_json",
  "sol_rpc_client",
  "sol_rpc_types",
  "tokio",
diff --git a/integration_tests/Cargo.toml b/integration_tests/Cargo.toml
index ada18fc5..20cba9f8 100644
--- a/integration_tests/Cargo.toml
+++ b/integration_tests/Cargo.toml
@@ -10,14 +10,10 @@ license.workspace = true
 [dependencies]
 async-trait = { workspace = true }
 candid = { workspace = true }
-canlog = { path = "../canlog" }
-canlog_derive = { path = "../canlog_derive" }
 ic-cdk = { workspace = true }
 ic-test-utilities-load-wasm = { workspace = true }
 pocket-ic = { workspace = true }
-proptest = { workspace = true }
 serde = { workspace = true }
-serde_json = { workspace = true }
 sol_rpc_client = { path = "../libs/client" }
 sol_rpc_types = { path = "../libs/types" }

From 0c372290bb63ba5d2434b3c263abf42185477f0a Mon Sep 17 00:00:00 2001
From: Louis Pahlavi
Date: Fri, 28 Feb 2025 10:39:49 +0100
Subject: [PATCH 14/20] Fix logs endpoint name

---
 canister/src/main.rs | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/canister/src/main.rs b/canister/src/main.rs
index 5027043f..5f80aea9 100644
--- a/canister/src/main.rs
+++ b/canister/src/main.rs
@@ -71,7 +71,7 @@ async fn update_api_keys(api_keys: Vec<(ProviderId, Option<String>)>) {
 #[query(hidden = true)]
 fn http_request(request: http_types::HttpRequest) -> http_types::HttpResponse {
     match request.path() {
-        "/log" => {
+        "/logs" => {
             let max_skip_timestamp = match request.raw_query_param("time") {
                 Some(arg) => match u64::from_str(arg) {
                     Ok(value) => value,
From c0f08a64d589c6f3b5d8a9cd3ce52a684215c687 Mon Sep 17 00:00:00 2001
From: Louis Pahlavi
Date: Fri, 28 Feb 2025 10:44:22 +0100
Subject: [PATCH 15/20] Fix broken LICENSE symlinks

---
 canlog/LICENSE        | 2 +-
 canlog_derive/LICENSE | 2 +-
 2 files changed, 2 insertions(+), 2 deletions(-)

diff --git a/canlog/LICENSE b/canlog/LICENSE
index 30cff740..ea5b6064 120000
--- a/canlog/LICENSE
+++ b/canlog/LICENSE
@@ -1 +1 @@
-../../LICENSE
\ No newline at end of file
+../LICENSE
\ No newline at end of file
diff --git a/canlog_derive/LICENSE b/canlog_derive/LICENSE
index 30cff740..ea5b6064 120000
--- a/canlog_derive/LICENSE
+++ b/canlog_derive/LICENSE
@@ -1 +1 @@
-../../LICENSE
\ No newline at end of file
+../LICENSE
\ No newline at end of file
From d7fcdf2bb7dff87c2da9c6aff591cd3b8b1aec49 Mon Sep 17 00:00:00 2001
From: Louis Pahlavi
Date: Fri, 28 Feb 2025 11:06:30 +0100
Subject: [PATCH 16/20] Move retrieve log method to integration test crate

---
 Cargo.lock                   |  8 ++++----
 integration_tests/Cargo.toml |  4 ++++
 integration_tests/src/lib.rs | 23 +++++++++++++++++++++++
 libs/client/Cargo.toml       |  6 +-----
 libs/client/src/lib.rs       | 23 -----------------------
 5 files changed, 32 insertions(+), 32 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 910885c6..97007358 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2959,12 +2959,8 @@ version = "0.1.0"
 dependencies = [
  "async-trait",
  "candid",
- "canlog",
  "ic-cdk",
  "serde",
- "serde_bytes",
- "serde_json",
- "sol_rpc_canister",
  "sol_rpc_types",
 ]

@@ -2974,10 +2970,14 @@ version = "0.1.0"
 dependencies = [
  "async-trait",
  "candid",
+ "canlog",
  "ic-cdk",
  "ic-test-utilities-load-wasm",
  "pocket-ic",
  "serde",
+ "serde_bytes",
+ "serde_json",
+ "sol_rpc_canister",
  "sol_rpc_client",
  "sol_rpc_types",
  "tokio",
diff --git a/integration_tests/Cargo.toml b/integration_tests/Cargo.toml
index 20cba9f8..0872c083 100644
--- a/integration_tests/Cargo.toml
+++ b/integration_tests/Cargo.toml
@@ -10,10 +10,14 @@ license.workspace = true
 [dependencies]
 async-trait = { workspace = true }
 candid = { workspace = true }
+canlog = { path = "../canlog" }
 ic-cdk = { workspace = true }
 ic-test-utilities-load-wasm = { workspace = true }
 pocket-ic = { workspace = true }
 serde = { workspace = true }
+serde_bytes = { workspace = true }
+serde_json = { workspace = true }
+sol_rpc_canister = { path = "../canister" }
 sol_rpc_client = { path = "../libs/client" }
 sol_rpc_types = { path = "../libs/types" }

diff --git a/integration_tests/src/lib.rs b/integration_tests/src/lib.rs
index 8acb5233..d4240f19 100644
--- a/integration_tests/src/lib.rs
+++ b/integration_tests/src/lib.rs
@@ -1,10 +1,15 @@
 use async_trait::async_trait;
 use candid::utils::ArgumentEncoder;
 use candid::{decode_args, encode_args, CandidType, Encode, Principal};
+use canlog::{Log, LogEntry};
 use ic_cdk::api::call::RejectionCode;
 use pocket_ic::management_canister::{CanisterId, CanisterSettings};
 use pocket_ic::{nonblocking::PocketIc, PocketIcBuilder, UserError, WasmResult};
 use serde::de::DeserializeOwned;
+use sol_rpc_canister::{
+    http_types::{HttpRequest, HttpResponse},
+    logs::Priority,
+};
 use sol_rpc_client::{Runtime, SolRpcClient};
 use sol_rpc_types::{InstallArgs, ProviderId};
 use std::path::PathBuf;
@@ -191,6 +196,7 @@ impl PocketIcRuntime<'_> {
 #[async_trait]
 pub trait SolRpcTestClient<R: Runtime> {
     async fn verify_api_key(&self, api_key: (ProviderId, Option<String>));
+    async fn retrieve_logs(&self, priority: &str) -> Vec<LogEntry<Priority>>;
     fn with_caller<T: Into<Principal>>(self, id: T) -> Self;
 }

@@ -203,6 +209,23 @@ impl SolRpcTestClient<PocketIcRuntime<'_>> for SolRpcClient<PocketIcRuntime<'_>> {
             .unwrap()
     }

+    async fn retrieve_logs(&self, priority: &str) -> Vec<LogEntry<Priority>> {
+        let request = HttpRequest {
+            method: "POST".to_string(),
+            url: format!("/logs?priority={priority}"),
+            headers: vec![],
+            body: serde_bytes::ByteBuf::new(),
+        };
+        let response: HttpResponse = self
+            .runtime
+            .query_call(self.sol_rpc_canister, "http_request", (request,))
+            .await
+            .unwrap();
+        serde_json::from_slice::<Log<Priority>>(&response.body)
+            .expect("failed to parse SOL RPC canister log")
+            .entries
+    }
+
     fn with_caller<T: Into<Principal>>(mut self, id: T) -> Self {
         self.runtime.caller = id.into();
         self
diff --git a/libs/client/Cargo.toml b/libs/client/Cargo.toml
index 5bc07642..a37b8f62 100644
--- a/libs/client/Cargo.toml
+++ b/libs/client/Cargo.toml
@@ -15,8 +15,4 @@ async-trait = { workspace = true }
 candid = { workspace = true }
 ic-cdk = { workspace = true }
 serde = { workspace = true }
-serde_bytes = { workspace = true }
-serde_json = { workspace = true }
-canlog = { path = "../../canlog" }
-sol_rpc_types = { path = "../types" }
-sol_rpc_canister = { path = "../../canister" }
\ No newline at end of file
+sol_rpc_types = { path = "../types" }
\ No newline at end of file
diff --git a/libs/client/src/lib.rs b/libs/client/src/lib.rs
index f9e1dd28..695da69b 100644
--- a/libs/client/src/lib.rs
+++ b/libs/client/src/lib.rs
@@ -8,11 +8,6 @@ use candid::utils::ArgumentEncoder;
 use candid::{CandidType, Principal};
 use ic_cdk::api::call::RejectionCode;
 use serde::de::DeserializeOwned;
-use sol_rpc_canister::{
-    http_types::{HttpRequest, HttpResponse},
-    logs::Priority,
-};
-use canlog::{Log, LogEntry};
 use sol_rpc_types::ProviderId;

 /// Abstract the canister runtime so that the client code can be reused:
@@ -97,24 +92,6 @@ impl<R: Runtime> SolRpcClient<R> {
             .await
             .unwrap()
     }
-
-    /// Retrieve canlog from the SOL RPC canister from the HTTP endpoint.
-    pub async fn retrieve_logs(&self, priority: &str) -> Vec<LogEntry<Priority>> {
-        let request = HttpRequest {
-            method: "POST".to_string(),
-            url: format!("/log?priority={priority}"),
-            headers: vec![],
-            body: serde_bytes::ByteBuf::new(),
-        };
-        let response: HttpResponse = self
-            .runtime
-            .query_call(self.sol_rpc_canister, "http_request", (request,))
-            .await
-            .unwrap();
-        serde_json::from_slice::<Log<Priority>>(&response.body)
-            .expect("failed to parse SOL RPC canister log")
-            .entries
-    }
 }

 #[derive(Copy, Clone, Eq, PartialEq, Debug)]
From e7d40aafa99657c2865d280264d9a367b596d397 Mon Sep 17 00:00:00 2001
From: Louis Pahlavi
Date: Fri, 28 Feb 2025 11:19:18 +0100
Subject: [PATCH 17/20] Clippy

---
 canister/src/state/mod.rs       | 2 +-
 canlog_derive/src/lib.rs        | 2 +-
 libs/types/src/lifecycle/mod.rs | 2 +-
 3 files changed, 3 insertions(+), 3 deletions(-)

diff --git a/canister/src/state/mod.rs b/canister/src/state/mod.rs
index f65610df..7dc24526 100644
--- a/canister/src/state/mod.rs
+++ b/canister/src/state/mod.rs
@@ -3,13 +3,13 @@ mod tests;

 use crate::types::{ApiKey, OverrideProvider};
 use candid::{Deserialize, Principal};
+use canlog::LogFilter;
 use ic_stable_structures::{
     memory_manager::{MemoryId, MemoryManager, VirtualMemory},
     storable::Bound,
     Cell, DefaultMemoryImpl, Storable,
 };
 use serde::Serialize;
-use canlog::LogFilter;
 use sol_rpc_types::{InstallArgs, ProviderId};
 use std::{borrow::Cow, cell::RefCell, collections::BTreeMap};

diff --git a/canlog_derive/src/lib.rs b/canlog_derive/src/lib.rs
index 4a960e23..d36e7a73 100644
--- a/canlog_derive/src/lib.rs
+++ b/canlog_derive/src/lib.rs
@@ -31,7 +31,7 @@ pub fn derive_log_priority(input: TokenStream) -> TokenStream {
     let buffer_declarations = variants.iter().map(|variant| {
         let variant_ident = &variant.ident;
         let info = LogLevelInfo::from_variant(variant)
-            .expect(format!("Invalid attributes for log level: {}", variant_ident).as_str());
+            .unwrap_or_else(|_| panic!("Invalid attributes for log level: {}", variant_ident));

         let buffer_ident = get_buffer_ident(variant_ident);
         let sink_ident = get_sink_ident(variant_ident);
diff --git a/libs/types/src/lifecycle/mod.rs b/libs/types/src/lifecycle/mod.rs
index 0afe3172..eaa44d78 100644
--- a/libs/types/src/lifecycle/mod.rs
+++ b/libs/types/src/lifecycle/mod.rs
@@ -1,7 +1,7 @@
 use crate::OverrideProvider;
 use candid::{CandidType, Principal};
-use serde::Deserialize;
 use canlog::LogFilter;
+use serde::Deserialize;

 /// The installation args for the Solana RPC canister
 #[derive(Clone, Debug, Default, CandidType, Deserialize)]
From 7c40901e6764b06597a61f3bfb77d1057b94c9c5 Mon Sep 17 00:00:00 2001
From: Louis Pahlavi
Date: Fri, 28 Feb 2025 11:46:49 +0100
Subject: [PATCH 18/20] Fix documentation errors

---
 Cargo.lock               |  3 ++-
 canlog/src/lib.rs        | 31 ++++++++++++++++++++++---------
 canlog_derive/Cargo.toml |  7 +++++--
 canlog_derive/src/lib.rs | 12 +----------
 4 files changed, 30 insertions(+), 23 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index 97007358..cb17b89c 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -473,10 +473,11 @@ dependencies = [
 name = "canlog_derive"
 version = "0.1.0"
 dependencies = [
+ "canlog",
  "darling",
- "ic-canister-log",
  "proc-macro2",
  "quote",
+ "serde",
  "syn 2.0.98",
 ]

diff --git a/canlog/src/lib.rs b/canlog/src/lib.rs
index ed83fb8e..7070aeb3 100644
--- a/canlog/src/lib.rs
+++ b/canlog/src/lib.rs
@@ -54,15 +54,28 @@ mod types;
 extern crate self as canlog;

 pub use crate::types::{LogFilter, Sort};
-#[cfg(any(feature = "derive", test))]
-pub use canlog_derive::*;
+
+use ic_canister_log;
 pub use ic_canister_log::{
     declare_log_buffer, export as export_logs, log as raw_log, GlobalBuffer, Sink,
 };
 use serde::{Deserialize, Serialize};

-/// Wrapper for the [`ic_canister_log::log`] macro that allows logging
-/// for a given variant of an enum implementing the [`LogPriorityLevels`]
+#[cfg(any(feature = "derive", test))]
+/// A procedural macro to implement [`LogPriorityLevels`] for an enum.
+///
+/// This macro expects the variants to be annotated with `#[log_level(capacity = N, name = "NAME")]`
+/// where `N` is an integer representing buffer capacity and `"NAME"` is a string display
+/// representation for the corresponding log level.
+///
+/// The enum annotated with `#[derive(LogPriorityLevels)]` must also implement the
+/// [`Serialize`], [`Deserialize`], [`Clone`] and [`Copy`] traits.
+///
+/// See the top-level crate documentation for example usage.
+pub use canlog_derive::LogPriorityLevels;
+
+/// Wrapper for the [`ic_canister_log::log`](ic_canister_log::log!) macro that allows
+/// logging for a given variant of an enum implementing the [`LogPriorityLevels`]
 /// trait. See the example in the crate documentation.
 #[macro_export]
 macro_rules! log {
@@ -75,7 +88,8 @@ macro_rules! log {
 }

 /// Represents a log priority level. This trait is meant to be implemented
-/// automatically by the [`declare_log_priorities!`] macro.
+/// automatically with the [`derive`](macro@derive) attribute macro which
+/// is available with the `derive` feature of this crate.
 pub trait LogPriorityLevels {
     #[doc(hidden)]
     fn get_buffer(&self) -> &'static GlobalBuffer;
@@ -92,8 +106,7 @@ pub trait LogPriorityLevels {
 }

 /// Returns the [`LogFilter`] to check what entries to record. This trait should
-/// be implemented manually for the log priority level enum generated by the
-/// [`declare_log_priorities!`] macro.
+/// be implemented manually.
 pub trait GetLogFilter {
     /// Returns a [`LogFilter`]. Only log entries matching this filter will be recorded.
     fn get_log_filter() -> LogFilter;
@@ -133,7 +146,7 @@ impl<'de, Priority> Log<Priority>
 where
     Priority: LogPriorityLevels + Clone + Copy + Deserialize<'de> + Serialize + 'static,
 {
-    /// Append all the entries from the given [`Priority`] to [`entries`].
+    /// Append all the entries from the given `Priority` to [`Log::entries`].
     pub fn push_logs(&mut self, priority: Priority) {
         for entry in export_logs(priority.get_buffer()) {
             self.entries.push(LogEntry {
@@ -147,7 +160,7 @@ where
         }
     }

-    /// Append all the entries from all priority levels to [`entries`].
+    /// Append all the entries from all priority levels to [`Log::entries`].
     pub fn push_all(&mut self) {
         Priority::get_priorities()
             .iter()
diff --git a/canlog_derive/Cargo.toml b/canlog_derive/Cargo.toml
index b316a0a3..8dd16ae7 100644
--- a/canlog_derive/Cargo.toml
+++ b/canlog_derive/Cargo.toml
@@ -11,12 +11,15 @@ readme = "README.md"
 include = ["src", "Cargo.toml", "CHANGELOG.md", "LICENSE", "README.md"]

 [dependencies]
-ic-canister-log = { workspace = true }
-
 syn = { version = "2.0.98", features = ["derive"] }
 quote = "1.0.38"
 proc-macro2 = "1.0.93"
 darling = "0.20.10"

+[dev-dependencies]
+canlog = { path = "../canlog" }
+serde = { workspace = true, features = ["derive"] }
+
 [lib]
 proc-macro = true
+
diff --git a/canlog_derive/src/lib.rs b/canlog_derive/src/lib.rs
index d36e7a73..7babea6f 100644
--- a/canlog_derive/src/lib.rs
+++ b/canlog_derive/src/lib.rs
@@ -1,7 +1,6 @@
-//! Procedural macros for the canlog crate
+//! Procedural macros for the canlog crate. Refer to the canlog crate documentation.

 #![forbid(unsafe_code)]
-#![forbid(missing_docs)]

 use darling::FromVariant;
 use proc_macro::TokenStream;
@@ -9,15 +8,6 @@ use proc_macro2::Ident;
 use quote::quote;
 use syn::{parse_macro_input, Data, DataEnum, DeriveInput};

-/// A procedural macro to implement [`LogPriorityLevels`](canlog::LogPriorityLevels) for an enum.
-///
-/// This macro expects the variants to be annotated with `#[log_level(capacity = N, name = "NAME")]`
-/// where `N` is an integer representing buffer capacity and `"NAME"` is a string display
-/// representation for the corresponding log level.
-///
-/// The enum annotated with `#[derive(LogPriorityLevels)]` must also implement the
-/// [`Serialize`](serde::Serialize), [`Deserialize`](serde::Deserialize),
-/// [`Clone`](core::clone::Clone) and [`Copy`](core::marker::Copy) traits
 #[proc_macro_derive(LogPriorityLevels, attributes(log_level))]
 pub fn derive_log_priority(input: TokenStream) -> TokenStream {
     let input = parse_macro_input!(input as DeriveInput);
From 82a67feefc3e9c233e90c9ee73fd1c9dd2e8beed Mon Sep 17 00:00:00 2001
From: Louis Pahlavi
Date: Fri, 28 Feb 2025 11:58:55 +0100
Subject: [PATCH 19/20] Removed unused import

---
 canlog/src/lib.rs | 1 -
 1 file changed, 1 deletion(-)

diff --git a/canlog/src/lib.rs b/canlog/src/lib.rs
index 7070aeb3..c5409dc0 100644
--- a/canlog/src/lib.rs
+++ b/canlog/src/lib.rs
@@ -55,7 +55,6 @@ extern crate self as canlog;

 pub use crate::types::{LogFilter, Sort};

-use ic_canister_log;
 pub use ic_canister_log::{
     declare_log_buffer, export as export_logs, log as raw_log, GlobalBuffer, Sink,
 };
From d2f839d9cc4016f80990419963ad72a2af1484ef Mon Sep 17 00:00:00 2001
From: Louis Pahlavi
Date: Fri, 28 Feb 2025 12:02:24 +0100
Subject: [PATCH 20/20] Add #[doc(inline)]

---
 canlog/src/lib.rs | 1 +
 1 file changed, 1 insertion(+)

diff --git a/canlog/src/lib.rs b/canlog/src/lib.rs
index c5409dc0..7ea1b3ac 100644
--- a/canlog/src/lib.rs
+++ b/canlog/src/lib.rs
@@ -71,6 +71,7 @@ use serde::{Deserialize, Serialize};
 /// [`Serialize`], [`Deserialize`], [`Clone`] and [`Copy`] traits.
 ///
 /// See the top-level crate documentation for example usage.
+#[doc(inline)]
 pub use canlog_derive::LogPriorityLevels;

 /// Wrapper for the [`ic_canister_log::log`](ic_canister_log::log!) macro that allows
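For orientation only, not part of the patch series: a rough sketch of how the pieces introduced above are meant to fit together, based on the doc comments added in PATCH 18 and the filter usage in the tests. The enum name, variants, capacities and level names below are invented, the canlog `derive` feature is assumed to be enabled, and whether a GetLogFilter impl is strictly required for log! to compile is an assumption; the canlog crate documentation referenced in the patches remains the authoritative example.

use canlog::{log, GetLogFilter, LogFilter, LogPriorityLevels};
use serde::{Deserialize, Serialize};

// Hypothetical priority enum: the derive generates a log buffer per variant,
// sized and named via the #[log_level(...)] attribute described in the docs.
#[derive(Clone, Copy, Serialize, Deserialize, LogPriorityLevels)]
enum Priority {
    #[log_level(capacity = 1000, name = "INFO")]
    Info,
    #[log_level(capacity = 1000, name = "DEBUG")]
    Debug,
}

// Implemented by hand, as the GetLogFilter docs require; the match-everything
// ShowPattern mirrors the pattern-based filters exercised in the tests above.
impl GetLogFilter for Priority {
    fn get_log_filter() -> LogFilter {
        LogFilter::ShowPattern(".*".into())
    }
}

fn emit_example_entries() {
    // The log! wrapper routes each message to the buffer of the given variant,
    // subject to the filter returned by get_log_filter().
    log!(Priority::Info, "canister initialized");
    log!(Priority::Debug, "state loaded from stable memory");
}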