diff --git a/crates/goose-cli/src/commands/schedule.rs b/crates/goose-cli/src/commands/schedule.rs
index f8773f8da7ee..dda6583e487a 100644
--- a/crates/goose-cli/src/commands/schedule.rs
+++ b/crates/goose-cli/src/commands/schedule.rs
@@ -1,21 +1,10 @@
 use anyhow::{bail, Context, Result};
-use base64::engine::{general_purpose::STANDARD as BASE64_STANDARD, Engine};
 use goose::scheduler::{
-    get_default_scheduled_recipes_dir, get_default_scheduler_storage_path, ScheduledJob,
+    get_default_scheduled_recipes_dir, get_default_scheduler_storage_path, ScheduledJob, Scheduler,
     SchedulerError,
 };
-use goose::scheduler_factory::SchedulerFactory;
 use std::path::Path;
 
-// Base64 decoding function - might be needed if recipe_source_arg can be base64
-// For now, handle_schedule_add will assume it's a path.
-async fn _decode_base64_recipe(source: &str) -> Result<String> {
-    let bytes = BASE64_STANDARD
-        .decode(source.as_bytes())
-        .with_context(|| "Recipe source is not a valid path and not valid Base64.")?;
-    String::from_utf8(bytes).with_context(|| "Decoded Base64 recipe source is not valid UTF-8.")
-}
-
 fn validate_cron_expression(cron: &str) -> Result<()> {
     // Basic validation and helpful suggestions
     if cron.trim().is_empty() {
@@ -84,7 +73,6 @@ pub async fn handle_schedule_add(
         schedule_id, cron, recipe_source_arg
     );
 
-    // Validate cron expression and provide helpful feedback
     validate_cron_expression(&cron)?;
 
     // The Scheduler's add_scheduled_job will handle copying the recipe from recipe_source_arg
@@ -102,7 +90,7 @@ pub async fn handle_schedule_add(
 
     let scheduler_storage_path =
         get_default_scheduler_storage_path().context("Failed to get scheduler storage path")?;
-    let scheduler = SchedulerFactory::create(scheduler_storage_path)
+    let scheduler = Scheduler::new(scheduler_storage_path)
         .await
         .context("Failed to initialize scheduler")?;
 
@@ -148,11 +136,11 @@ pub async fn handle_schedule_add(
 pub async fn handle_schedule_list() -> Result<()> {
     let scheduler_storage_path =
         get_default_scheduler_storage_path().context("Failed to get scheduler storage path")?;
-    let scheduler = SchedulerFactory::create(scheduler_storage_path)
+    let scheduler = Scheduler::new(scheduler_storage_path)
         .await
         .context("Failed to initialize scheduler")?;
 
-    let jobs = scheduler.list_scheduled_jobs().await?;
+    let jobs = scheduler.list_scheduled_jobs().await;
     if jobs.is_empty() {
         println!("No scheduled jobs found.");
     } else {
@@ -183,7 +171,7 @@ pub async fn handle_schedule_list() -> Result<()> {
 pub async fn handle_schedule_remove(schedule_id: String) -> Result<()> {
     let scheduler_storage_path =
         get_default_scheduler_storage_path().context("Failed to get scheduler storage path")?;
-    let scheduler = SchedulerFactory::create(scheduler_storage_path)
+    let scheduler = Scheduler::new(scheduler_storage_path)
         .await
         .context("Failed to initialize scheduler")?;
 
@@ -210,7 +198,7 @@ pub async fn handle_schedule_remove(schedule_id: String) -> Result<()> {
 pub async fn handle_schedule_sessions(schedule_id: String, limit: Option) -> Result<()> {
     let scheduler_storage_path =
         get_default_scheduler_storage_path().context("Failed to get scheduler storage path")?;
-    let scheduler = SchedulerFactory::create(scheduler_storage_path)
+    let scheduler = Scheduler::new(scheduler_storage_path)
         .await
         .context("Failed to initialize scheduler")?;
 
@@ -246,7 +234,7 @@ pub async fn handle_schedule_sessions(schedule_id: String, limit: Option)
 pub async fn handle_schedule_run_now(schedule_id: String) -> Result<()> {
     let scheduler_storage_path =
get_default_scheduler_storage_path().context("Failed to get scheduler storage path")?; - let scheduler = SchedulerFactory::create(scheduler_storage_path) + let scheduler = Scheduler::new(scheduler_storage_path) .await .context("Failed to initialize scheduler")?; diff --git a/crates/goose-server/src/routes/schedule.rs b/crates/goose-server/src/routes/schedule.rs index 077bf0e2c364..843be4d3a422 100644 --- a/crates/goose-server/src/routes/schedule.rs +++ b/crates/goose-server/src/routes/schedule.rs @@ -16,8 +16,6 @@ pub struct CreateScheduleRequest { id: String, recipe_source: String, cron: String, - #[serde(default)] - execution_mode: Option, // "foreground" or "background" } #[derive(Deserialize, Serialize, utoipa::ToSchema)] @@ -36,7 +34,6 @@ pub struct KillJobResponse { message: String, } -// Response for the inspect endpoint #[derive(Serialize, utoipa::ToSchema)] #[serde(rename_all = "camelCase")] pub struct InspectJobResponse { @@ -51,15 +48,9 @@ pub struct RunNowResponse { session_id: String, } -// Query parameters for the sessions endpoint #[derive(Deserialize, utoipa::ToSchema, utoipa::IntoParams)] pub struct SessionsQuery { - #[serde(default = "default_limit")] - limit: u32, -} - -fn default_limit() -> u32 { - 50 // Default limit for sessions listed + limit: usize, } // Struct for the frontend session list @@ -151,10 +142,7 @@ async fn list_schedules( .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; tracing::info!("Server: Calling scheduler.list_scheduled_jobs()"); - let jobs = scheduler.list_scheduled_jobs().await.map_err(|e| { - eprintln!("Error listing schedules: {:?}", e); - StatusCode::INTERNAL_SERVER_ERROR - })?; + let jobs = scheduler.list_scheduled_jobs().await; Ok(Json(ListSchedulesResponse { jobs })) } @@ -213,39 +201,40 @@ async fn run_now_handler( .await .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; - let (recipe_display_name, recipe_version_opt) = match scheduler.list_scheduled_jobs().await { - Ok(jobs) => { - if let Some(job) = jobs.into_iter().find(|job| job.id == id) { - let recipe_display_name = std::path::Path::new(&job.source) - .file_name() - .and_then(|name| name.to_str()) - .map(|s| s.to_string()) - .unwrap_or_else(|| id.clone()); - - let recipe_version_opt = tokio::fs::read_to_string(&job.source) - .await + let (recipe_display_name, recipe_version_opt) = if let Some(job) = scheduler + .list_scheduled_jobs() + .await + .into_iter() + .find(|job| job.id == id) + { + let recipe_display_name = std::path::Path::new(&job.source) + .file_name() + .and_then(|name| name.to_str()) + .map(|s| s.to_string()) + .unwrap_or_else(|| id.clone()); + + let recipe_version_opt = + tokio::fs::read_to_string(&job.source) + .await + .ok() + .and_then(|content: String| { + goose::recipe::template_recipe::parse_recipe_content( + &content, + Some( + std::path::Path::new(&job.source) + .parent() + .unwrap_or_else(|| std::path::Path::new("")) + .to_string_lossy() + .to_string(), + ), + ) .ok() - .and_then(|content| { - goose::recipe::template_recipe::parse_recipe_content( - &content, - Some( - std::path::Path::new(&job.source) - .parent() - .unwrap_or_else(|| std::path::Path::new("")) - .to_string_lossy() - .to_string(), - ), - ) - .ok() - .map(|(r, _)| r.version) - }); - - (recipe_display_name, recipe_version_opt) - } else { - (id.clone(), None) - } - } - Err(_) => (id.clone(), None), + .map(|(r, _)| r.version) + }); + + (recipe_display_name, recipe_version_opt) + } else { + (id.clone(), None) }; let recipe_version_tag = recipe_version_opt.as_deref().unwrap_or(""); @@ -308,7 +297,7 
@@ async fn sessions_handler( .map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?; match scheduler - .sessions(&schedule_id_param, query_params.limit as usize) + .sessions(&schedule_id_param, query_params.limit) .await { Ok(session_tuples) => { @@ -448,11 +437,7 @@ async fn update_schedule( } })?; - // Return the updated schedule - let jobs = scheduler.list_scheduled_jobs().await.map_err(|e| { - eprintln!("Error listing schedules after update: {:?}", e); - StatusCode::INTERNAL_SERVER_ERROR - })?; + let jobs = scheduler.list_scheduled_jobs().await; let updated_job = jobs .into_iter() .find(|job| job.id == id) diff --git a/crates/goose/src/agents/agent.rs b/crates/goose/src/agents/agent.rs index 6d381b8032c2..36fdfd4b4381 100644 --- a/crates/goose/src/agents/agent.rs +++ b/crates/goose/src/agents/agent.rs @@ -39,7 +39,6 @@ use crate::permission::PermissionConfirmation; use crate::providers::base::Provider; use crate::providers::errors::ProviderError; use crate::recipe::{Author, Recipe, Response, Settings, SubRecipe}; -use crate::scheduler_trait::SchedulerTrait; use crate::security::security_inspector::SecurityInspector; use crate::tool_inspection::ToolInspectionManager; use crate::tool_monitor::RepetitionInspector; @@ -60,6 +59,7 @@ use super::platform_tools; use super::tool_execution::{ToolCallResult, CHAT_MODE_TOOL_SKIPPED_RESPONSE, DECLINED_RESPONSE}; use crate::agents::subagent_task_config::TaskConfig; use crate::conversation::message::{Message, MessageContent, SystemNotificationType, ToolRequest}; +use crate::scheduler_trait::SchedulerTrait; use crate::session::extension_data::{EnabledExtensionsState, ExtensionState}; use crate::session::{Session, SessionManager}; @@ -346,7 +346,6 @@ impl Agent { Ok(tool_futures) } - /// Set the scheduler service for this agent pub async fn set_scheduler(&self, scheduler: Arc) { let mut scheduler_service = self.scheduler_service.lock().await; *scheduler_service = Some(scheduler); diff --git a/crates/goose/src/agents/schedule_tool.rs b/crates/goose/src/agents/schedule_tool.rs index 3651c9bc7c96..45fd7866787a 100644 --- a/crates/goose/src/agents/schedule_tool.rs +++ b/crates/goose/src/agents/schedule_tool.rs @@ -9,11 +9,10 @@ use crate::mcp_utils::ToolResult; use chrono::Utc; use rmcp::model::{Content, ErrorCode, ErrorData}; +use super::Agent; use crate::recipe::Recipe; use crate::scheduler_trait::SchedulerTrait; -use super::Agent; - impl Agent { /// Handle schedule management tool calls pub async fn handle_schedule_management( @@ -62,34 +61,24 @@ impl Agent { } } - /// List all scheduled jobs async fn handle_list_jobs( &self, scheduler: Arc, ) -> ToolResult> { - match scheduler.list_scheduled_jobs().await { - Ok(jobs) => { - let jobs_json = serde_json::to_string_pretty(&jobs).map_err(|e| { - ErrorData::new( - ErrorCode::INTERNAL_ERROR, - format!("Failed to serialize jobs: {}", e), - None, - ) - })?; - Ok(vec![Content::text(format!( - "Scheduled Jobs:\n{}", - jobs_json - ))]) - } - Err(e) => Err(ErrorData::new( + let jobs = scheduler.list_scheduled_jobs().await; + let jobs_json = serde_json::to_string_pretty(&jobs).map_err(|e| { + ErrorData::new( ErrorCode::INTERNAL_ERROR, - format!("Failed to list jobs: {}", e), + format!("Failed to serialize jobs: {}", e), None, - )), - } + ) + })?; + Ok(vec![Content::text(format!( + "Scheduled Jobs:\n{}", + jobs_json + ))]) } - /// Create a new scheduled job from a recipe file async fn handle_create_job( &self, scheduler: Arc, @@ -123,19 +112,6 @@ impl Agent { .and_then(|v| v.as_str()) .unwrap_or("background"); - // 
Validate execution_mode is either "foreground" or "background" - if execution_mode != "foreground" && execution_mode != "background" { - return Err(ErrorData::new( - ErrorCode::INTERNAL_ERROR, - format!( - "Invalid execution_mode: {}. Must be 'foreground' or 'background'", - execution_mode - ), - None, - )); - } - - // Validate recipe file exists and is readable if !std::path::Path::new(recipe_path).exists() { return Err(ErrorData::new( ErrorCode::INTERNAL_ERROR, diff --git a/crates/goose/src/cron_test.rs b/crates/goose/src/cron_test.rs deleted file mode 100644 index 1fda3413a8be..000000000000 --- a/crates/goose/src/cron_test.rs +++ /dev/null @@ -1,56 +0,0 @@ -#[cfg(test)] -mod cron_parsing_tests { - use crate::scheduler::normalize_cron_expression; - use tokio_cron_scheduler::Job; - - // Helper: drop the last field if we have 7 so tokio_cron_scheduler (6-field) can parse - fn to_tokio_spec(spec: &str) -> String { - let parts: Vec<&str> = spec.split_whitespace().collect(); - if parts.len() == 7 { - parts[..6].join(" ") - } else { - spec.to_string() - } - } - - #[test] - fn test_normalize_cron_expression() { - // 5-field → 7-field - assert_eq!(normalize_cron_expression("0 12 * * *"), "0 0 12 * * * *"); - assert_eq!(normalize_cron_expression("*/5 * * * *"), "0 */5 * * * * *"); - assert_eq!(normalize_cron_expression("0 0 * * 1"), "0 0 0 * * 1 *"); - - // 6-field → 7-field (append *) - assert_eq!(normalize_cron_expression("0 0 12 * * *"), "0 0 12 * * * *"); - assert_eq!( - normalize_cron_expression("*/30 */5 * * * *"), - "*/30 */5 * * * * *" - ); - - // Weekday expressions (unchanged apart from 7-field format) - assert_eq!(normalize_cron_expression("0 * * * 1-5"), "0 0 * * * 1-5 *"); - assert_eq!( - normalize_cron_expression("*/20 * * * 1-5"), - "0 */20 * * * 1-5 *" - ); - } - - #[tokio::test] - async fn test_cron_expression_formats() { - let samples = [ - "0 0 * * *", // 5-field - "0 0 0 * * *", // 6-field - "*/5 * * * *", // 5-field - ]; - for expr in samples { - let norm = normalize_cron_expression(expr); - let tokio_spec = to_tokio_spec(&norm); - assert!( - Job::new_async(&tokio_spec, |_id, _l| Box::pin(async {})).is_ok(), - "failed to parse {} -> {}", - expr, - norm - ); - } - } -} diff --git a/crates/goose/src/execution/manager.rs b/crates/goose/src/execution/manager.rs index 615f7943213f..c222be2d42da 100644 --- a/crates/goose/src/execution/manager.rs +++ b/crates/goose/src/execution/manager.rs @@ -1,7 +1,7 @@ use crate::agents::extension::PlatformExtensionContext; use crate::agents::Agent; use crate::config::paths::Paths; -use crate::scheduler_factory::SchedulerFactory; +use crate::scheduler::Scheduler; use crate::scheduler_trait::SchedulerTrait; use anyhow::Result; use lru::LruCache; @@ -35,7 +35,7 @@ impl AgentManager { async fn new(max_sessions: Option) -> Result { let schedule_file_path = Paths::data_dir().join("schedule.json"); - let scheduler = SchedulerFactory::create(schedule_file_path).await?; + let scheduler = Scheduler::new(schedule_file_path).await?; let capacity = NonZeroUsize::new(max_sessions.unwrap_or(DEFAULT_MAX_SESSION)) .unwrap_or_else(|| NonZeroUsize::new(100).unwrap()); diff --git a/crates/goose/src/lib.rs b/crates/goose/src/lib.rs index 1d6c86fcb33f..17ffb2b00c68 100644 --- a/crates/goose/src/lib.rs +++ b/crates/goose/src/lib.rs @@ -14,7 +14,6 @@ pub mod providers; pub mod recipe; pub mod recipe_deeplink; pub mod scheduler; -pub mod scheduler_factory; pub mod scheduler_trait; pub mod security; pub mod session; @@ -25,6 +24,3 @@ pub mod tool_inspection; pub mod 
tool_monitor; pub mod tracing; pub mod utils; - -#[cfg(test)] -mod cron_test; diff --git a/crates/goose/src/scheduler.rs b/crates/goose/src/scheduler.rs index 509450502ee5..f3cc9a775047 100644 --- a/crates/goose/src/scheduler.rs +++ b/crates/goose/src/scheduler.rs @@ -6,10 +6,11 @@ use std::sync::Arc; use anyhow::{anyhow, Result}; use async_trait::async_trait; -use chrono::{DateTime, Utc}; +use chrono::{DateTime, Local, Utc}; use serde::{Deserialize, Serialize}; use tokio::sync::Mutex; use tokio_cron_scheduler::{job::JobId, Job, JobScheduler as TokioJobScheduler}; +use tokio_util::sync::CancellationToken; use crate::agents::AgentEvent; use crate::agents::{Agent, SessionConfig}; @@ -17,52 +18,15 @@ use crate::config::paths::Paths; use crate::config::Config; use crate::conversation::message::Message; use crate::conversation::Conversation; -use crate::providers::base::Provider as GooseProvider; // Alias to avoid conflict in test section use crate::providers::create; use crate::recipe::Recipe; use crate::scheduler_trait::SchedulerTrait; use crate::session::session_manager::SessionType; use crate::session::{Session, SessionManager}; -// Track running tasks with their abort handles -type RunningTasksMap = HashMap; +type RunningTasksMap = HashMap; type JobsMap = HashMap; -/// Normalize a cron string so that: -/// 1. It is always in **quartz 7-field format** expected by Temporal -/// (seconds minutes hours dom month dow year). -/// 2. Five-field → prepend seconds `0` and append year `*`. -/// Six-field → append year `*`. -/// 3. Everything else returned unchanged (with a warning). -pub fn normalize_cron_expression(src: &str) -> String { - let mut parts: Vec<&str> = src.split_whitespace().collect(); - - match parts.len() { - 5 => { - // min hour dom mon dow → 0 min hour dom mon dow * - parts.insert(0, "0"); - parts.push("*"); - } - 6 => { - // sec min hour dom mon dow → sec min hour dom mon dow * - parts.push("*"); - } - 7 => { - // already quartz – do nothing - } - _ => { - tracing::warn!( - "Unrecognised cron expression '{}': expected 5, 6 or 7 fields (got {}). 
Leaving unchanged.", - src, - parts.len() - ); - return src.to_string(); - } - } - - parts.join(" ") -} - pub fn get_default_scheduler_storage_path() -> Result { let data_dir = Paths::data_dir(); fs::create_dir_all(&data_dir)?; @@ -73,10 +37,6 @@ pub fn get_default_scheduled_recipes_dir() -> Result { let data_dir = Paths::data_dir(); let recipes_dir = data_dir.join("scheduled_recipes"); fs::create_dir_all(&recipes_dir).map_err(SchedulerError::StorageError)?; - tracing::debug!( - "Created scheduled recipes directory at: {}", - recipes_dir.display() - ); Ok(recipes_dir) } @@ -155,22 +115,22 @@ pub struct ScheduledJob { pub process_start_time: Option>, } -async fn persist_jobs_from_arc( +async fn persist_jobs( storage_path: &Path, - jobs_arc: &Arc>, + jobs: &Arc>, ) -> Result<(), SchedulerError> { - let jobs_guard = jobs_arc.lock().await; + let jobs_guard = jobs.lock().await; let list: Vec = jobs_guard.values().map(|(_, j)| j.clone()).collect(); if let Some(parent) = storage_path.parent() { - fs::create_dir_all(parent).map_err(SchedulerError::StorageError)?; + fs::create_dir_all(parent)?; } - let data = serde_json::to_string_pretty(&list).map_err(SchedulerError::from)?; - fs::write(storage_path, data).map_err(SchedulerError::StorageError)?; + let data = serde_json::to_string_pretty(&list)?; + fs::write(storage_path, data)?; Ok(()) } pub struct Scheduler { - internal_scheduler: TokioJobScheduler, + tokio_scheduler: TokioJobScheduler, jobs: Arc>, storage_path: PathBuf, running_tasks: Arc>, @@ -186,15 +146,15 @@ impl Scheduler { let running_tasks = Arc::new(Mutex::new(HashMap::new())); let arc_self = Arc::new(Self { - internal_scheduler, + tokio_scheduler: internal_scheduler, jobs, storage_path, running_tasks, }); - arc_self.load_jobs_from_storage().await?; + arc_self.load_jobs_from_storage().await; arc_self - .internal_scheduler + .tokio_scheduler .start() .await .map_err(|e| SchedulerError::SchedulerInternalError(e.to_string()))?; @@ -202,411 +162,236 @@ impl Scheduler { Ok(arc_self) } - pub async fn add_scheduled_job( - &self, - original_job_spec: ScheduledJob, - ) -> Result<(), SchedulerError> { - let mut jobs_guard = self.jobs.lock().await; - if jobs_guard.contains_key(&original_job_spec.id) { - return Err(SchedulerError::JobIdExists(original_job_spec.id.clone())); - } + fn create_cron_task(&self, job: ScheduledJob) -> Result { + let job_for_task = job.clone(); + let jobs_arc = self.jobs.clone(); + let storage_path = self.storage_path.clone(); + let running_tasks_arc = self.running_tasks.clone(); - let original_recipe_path = Path::new(&original_job_spec.source); - if !original_recipe_path.exists() { - return Err(SchedulerError::RecipeLoadError(format!( - "Original recipe file not found: {}", - original_job_spec.source - ))); - } - if !original_recipe_path.is_file() { - return Err(SchedulerError::RecipeLoadError(format!( - "Original recipe source is not a file: {}", - original_job_spec.source - ))); - } + let cron_parts: Vec<&str> = job.cron.split_whitespace().collect(); + let cron = match cron_parts.len() { + 5 => { + tracing::warn!( + "Job '{}' has legacy 5-field cron '{}', converting to 6-field", + job.id, + job.cron + ); + format!("0 {}", job.cron) + } + 6 => job.cron.clone(), + _ => { + return Err(SchedulerError::CronParseError(format!( + "Invalid cron expression '{}': expected 5 or 6 fields, got {}", + job.cron, + cron_parts.len() + ))) + } + }; - let scheduled_recipes_dir = get_default_scheduled_recipes_dir()?; - let original_extension = original_recipe_path - .extension() - 
.and_then(|ext| ext.to_str()) - .unwrap_or("yaml"); - - let destination_filename = format!("{}.{}", original_job_spec.id, original_extension); - let destination_recipe_path = scheduled_recipes_dir.join(destination_filename); + let local_tz = Local::now().timezone(); tracing::info!( - "Copying recipe from {} to {}", - original_recipe_path.display(), - destination_recipe_path.display() + "Creating cron task for job '{}' cron: '{}' in timezone: {:?}", + job.id, + cron, + local_tz ); - fs::copy(original_recipe_path, &destination_recipe_path).map_err(|e| { - SchedulerError::StorageError(io::Error::new( - e.kind(), - format!( - "Failed to copy recipe from {} to {}: {}", - original_job_spec.source, - destination_recipe_path.display(), - e - ), - )) - })?; - let mut stored_job = original_job_spec.clone(); - stored_job.source = destination_recipe_path.to_string_lossy().into_owned(); - stored_job.current_session_id = None; - stored_job.process_start_time = None; - tracing::info!("Updated job source path to: {}", stored_job.source); - - let job_for_task = stored_job.clone(); - let jobs_arc_for_task = self.jobs.clone(); - let storage_path_for_task = self.storage_path.clone(); - let running_tasks_for_task = self.running_tasks.clone(); - - tracing::info!("Attempting to parse cron expression: '{}'", stored_job.cron); - let normalized_cron = normalize_cron_expression(&stored_job.cron); - // Convert from 7-field (Temporal format) to 6-field (tokio-cron-scheduler format) - let tokio_cron = { - let parts: Vec<&str> = normalized_cron.split_whitespace().collect(); - if parts.len() == 7 { - parts[..6].join(" ") - } else { - normalized_cron.clone() - } - }; - if tokio_cron != stored_job.cron { - tracing::info!( - "Converted cron expression from '{}' to '{}' for tokio-cron-scheduler", - stored_job.cron, - tokio_cron - ); - } - let cron_task = Job::new_async(&tokio_cron, move |_uuid, _l| { + Job::new_async_tz(&cron, local_tz, move |_uuid, _l| { + tracing::info!("Cron task triggered for job '{}'", job_for_task.id); let task_job_id = job_for_task.id.clone(); - let current_jobs_arc = jobs_arc_for_task.clone(); - let local_storage_path = storage_path_for_task.clone(); - let job_to_execute = job_for_task.clone(); // Clone for run_scheduled_job_internal - let running_tasks_arc = running_tasks_for_task.clone(); + let current_jobs_arc = jobs_arc.clone(); + let local_storage_path = storage_path.clone(); + let job_to_execute = job_for_task.clone(); + let running_tasks = running_tasks_arc.clone(); Box::pin(async move { - // Check if the job is paused before executing let should_execute = { - let jobs_map_guard = current_jobs_arc.lock().await; - if let Some((_, current_job_in_map)) = jobs_map_guard.get(&task_job_id) { - !current_job_in_map.paused - } else { - false - } + let jobs_guard = current_jobs_arc.lock().await; + jobs_guard + .get(&task_job_id) + .map(|(_, j)| !j.paused) + .unwrap_or(false) }; if !should_execute { - tracing::info!("Skipping execution of paused job '{}'", &task_job_id); + tracing::info!("Skipping paused job '{}'", task_job_id); return; } let current_time = Utc::now(); - let mut needs_persist = false; { - let mut jobs_map_guard = current_jobs_arc.lock().await; - if let Some((_, current_job_in_map)) = jobs_map_guard.get_mut(&task_job_id) { - current_job_in_map.last_run = Some(current_time); - current_job_in_map.currently_running = true; - current_job_in_map.process_start_time = Some(current_time); - needs_persist = true; + let mut jobs_guard = current_jobs_arc.lock().await; + if let Some((_, job)) = 
jobs_guard.get_mut(&task_job_id) { + job.last_run = Some(current_time); + job.currently_running = true; + job.process_start_time = Some(current_time); } } - if needs_persist { - if let Err(e) = - persist_jobs_from_arc(&local_storage_path, ¤t_jobs_arc).await - { - tracing::error!( - "Failed to persist last_run update for job {}: {}", - &task_job_id, - e - ); - } + if let Err(e) = persist_jobs(&local_storage_path, ¤t_jobs_arc).await { + tracing::error!("Failed to persist job status: {}", e); } - // Spawn the job execution as an abortable task - let job_task = tokio::spawn(run_scheduled_job_internal( - job_to_execute.clone(), - None, - Some(current_jobs_arc.clone()), - Some(task_job_id.clone()), - )); - - // Store the abort handle at the scheduler level + let cancel_token = CancellationToken::new(); { - let mut running_tasks_guard = running_tasks_arc.lock().await; - running_tasks_guard.insert(task_job_id.clone(), job_task.abort_handle()); + let mut tasks = running_tasks.lock().await; + tasks.insert(task_job_id.clone(), cancel_token.clone()); } - // Wait for the job to complete or be aborted - let result = job_task.await; + let result = execute_job( + job_to_execute, + current_jobs_arc.clone(), + task_job_id.clone(), + cancel_token.clone(), + ) + .await; - // Remove the abort handle { - let mut running_tasks_guard = running_tasks_arc.lock().await; - running_tasks_guard.remove(&task_job_id); + let mut tasks = running_tasks.lock().await; + tasks.remove(&task_job_id); } - // Update the job status after execution { - let mut jobs_map_guard = current_jobs_arc.lock().await; - if let Some((_, current_job_in_map)) = jobs_map_guard.get_mut(&task_job_id) { - current_job_in_map.currently_running = false; - current_job_in_map.current_session_id = None; - current_job_in_map.process_start_time = None; - needs_persist = true; + let mut jobs_guard = current_jobs_arc.lock().await; + if let Some((_, job)) = jobs_guard.get_mut(&task_job_id) { + job.currently_running = false; + job.current_session_id = None; + job.process_start_time = None; } } - if needs_persist { - if let Err(e) = - persist_jobs_from_arc(&local_storage_path, ¤t_jobs_arc).await - { - tracing::error!( - "Failed to persist running status update for job {}: {}", - &task_job_id, - e - ); - } + if let Err(e) = persist_jobs(&local_storage_path, ¤t_jobs_arc).await { + tracing::error!("Failed to persist job completion: {}", e); } match result { - Ok(Ok(_session_id)) => { - tracing::info!("Scheduled job '{}' completed successfully", &task_job_id); - } - Ok(Err(e)) => { - tracing::error!( - "Scheduled job '{}' execution failed: {}", - &e.job_id, - e.error - ); - } - Err(join_error) if join_error.is_cancelled() => { - tracing::info!("Scheduled job '{}' was cancelled/killed", &task_job_id); - } - Err(join_error) => { - tracing::error!( - "Scheduled job '{}' task failed: {}", - &task_job_id, - join_error - ); - } + Ok(_) => tracing::info!("Job '{}' completed", task_job_id), + Err(e) => tracing::error!("Job '{}' failed: {}", task_job_id, e), } }) }) - .map_err(|e| SchedulerError::CronParseError(e.to_string()))?; + .map_err(|e| SchedulerError::CronParseError(e.to_string())) + } + + pub async fn add_scheduled_job( + &self, + original_job_spec: ScheduledJob, + ) -> Result<(), SchedulerError> { + { + let jobs_guard = self.jobs.lock().await; + if jobs_guard.contains_key(&original_job_spec.id) { + return Err(SchedulerError::JobIdExists(original_job_spec.id.clone())); + } + } + + let original_recipe_path = Path::new(&original_job_spec.source); + if 
!original_recipe_path.is_file() { + return Err(SchedulerError::RecipeLoadError(format!( + "Recipe file not found: {}", + original_job_spec.source + ))); + } + + let scheduled_recipes_dir = get_default_scheduled_recipes_dir()?; + let original_extension = original_recipe_path + .extension() + .and_then(|ext| ext.to_str()) + .unwrap_or("yaml"); + + let destination_filename = format!("{}.{}", original_job_spec.id, original_extension); + let destination_recipe_path = scheduled_recipes_dir.join(destination_filename); + + fs::copy(original_recipe_path, &destination_recipe_path)?; + + let mut stored_job = original_job_spec; + stored_job.source = destination_recipe_path.to_string_lossy().into_owned(); + stored_job.current_session_id = None; + stored_job.process_start_time = None; + + let cron_task = self.create_cron_task(stored_job.clone())?; let job_uuid = self - .internal_scheduler + .tokio_scheduler .add(cron_task) .await .map_err(|e| SchedulerError::SchedulerInternalError(e.to_string()))?; - jobs_guard.insert(stored_job.id.clone(), (job_uuid, stored_job)); - // Pass the jobs_guard by reference for the initial persist after adding a job - self.persist_jobs_to_storage_with_guard(&jobs_guard).await?; + { + let mut jobs_guard = self.jobs.lock().await; + jobs_guard.insert(stored_job.id.clone(), (job_uuid, stored_job)); + } + + persist_jobs(&self.storage_path, &self.jobs).await?; Ok(()) } - async fn load_jobs_from_storage(self: &Arc) -> Result<(), SchedulerError> { + async fn load_jobs_from_storage(self: &Arc) { if !self.storage_path.exists() { - return Ok(()); + return; } - let data = fs::read_to_string(&self.storage_path)?; + let data = match fs::read_to_string(&self.storage_path) { + Ok(data) => data, + Err(e) => { + tracing::error!( + "Failed to read schedules.json: {}. Starting with empty schedule list.", + e + ); + return; + } + }; if data.trim().is_empty() { - return Ok(()); + return; } - let list: Vec = serde_json::from_str(&data).map_err(|e| { - SchedulerError::PersistError(format!("Failed to deserialize schedules.json: {}", e)) - })?; + let list: Vec = match serde_json::from_str(&data) { + Ok(jobs) => jobs, + Err(e) => { + tracing::error!( + "Failed to parse schedules.json: {}. Starting with empty schedule list.", + e + ); + return; + } + }; - let mut jobs_guard = self.jobs.lock().await; for job_to_load in list { if !Path::new(&job_to_load.source).exists() { - tracing::warn!("Recipe file {} for scheduled job {} not found in shared store. Skipping job load.", job_to_load.source, job_to_load.id); + tracing::warn!( + "Recipe file {} not found, skipping job '{}'", + job_to_load.source, + job_to_load.id + ); continue; } - let job_for_task = job_to_load.clone(); - let jobs_arc_for_task = self.jobs.clone(); - let storage_path_for_task = self.storage_path.clone(); - let running_tasks_for_task = self.running_tasks.clone(); - - tracing::info!( - "Loading job '{}' with cron expression: '{}'", - job_to_load.id, - job_to_load.cron - ); - let normalized_cron = normalize_cron_expression(&job_to_load.cron); - // Convert from 7-field (Temporal format) to 6-field (tokio-cron-scheduler format) - let tokio_cron = { - let parts: Vec<&str> = normalized_cron.split_whitespace().collect(); - if parts.len() == 7 { - parts[..6].join(" ") - } else { - normalized_cron.clone() + let cron_task = match self.create_cron_task(job_to_load.clone()) { + Ok(task) => task, + Err(e) => { + tracing::error!( + "Failed to create cron task for job '{}': {}. 
Skipping.", + job_to_load.id, + e + ); + continue; } }; - if tokio_cron != job_to_load.cron { - tracing::info!( - "Converted cron expression from '{}' to '{}' for tokio-cron-scheduler", - job_to_load.cron, - tokio_cron - ); - } - let cron_task = Job::new_async(&tokio_cron, move |_uuid, _l| { - let task_job_id = job_for_task.id.clone(); - let current_jobs_arc = jobs_arc_for_task.clone(); - let local_storage_path = storage_path_for_task.clone(); - let job_to_execute = job_for_task.clone(); // Clone for run_scheduled_job_internal - let running_tasks_arc = running_tasks_for_task.clone(); - - Box::pin(async move { - // Check if the job is paused before executing - let should_execute = { - let jobs_map_guard = current_jobs_arc.lock().await; - if let Some((_, stored_job)) = jobs_map_guard.get(&task_job_id) { - !stored_job.paused - } else { - false - } - }; - - if !should_execute { - tracing::info!("Skipping execution of paused job '{}'", &task_job_id); - return; - } - - let current_time = Utc::now(); - let mut needs_persist = false; - { - let mut jobs_map_guard = current_jobs_arc.lock().await; - if let Some((_, stored_job)) = jobs_map_guard.get_mut(&task_job_id) { - stored_job.last_run = Some(current_time); - stored_job.currently_running = true; - stored_job.process_start_time = Some(current_time); - needs_persist = true; - } - } - if needs_persist { - if let Err(e) = - persist_jobs_from_arc(&local_storage_path, ¤t_jobs_arc).await - { - tracing::error!( - "Failed to persist last_run update for loaded job {}: {}", - &task_job_id, - e - ); - } - } - - // Spawn the job execution as an abortable task - let job_task = tokio::spawn(run_scheduled_job_internal( - job_to_execute, - None, - Some(current_jobs_arc.clone()), - Some(task_job_id.clone()), - )); - - // Store the abort handle at the scheduler level - { - let mut running_tasks_guard = running_tasks_arc.lock().await; - running_tasks_guard.insert(task_job_id.clone(), job_task.abort_handle()); - } - - // Wait for the job to complete or be aborted - let result = job_task.await; - - // Remove the abort handle - { - let mut running_tasks_guard = running_tasks_arc.lock().await; - running_tasks_guard.remove(&task_job_id); - } - - // Update the job status after execution - { - let mut jobs_map_guard = current_jobs_arc.lock().await; - if let Some((_, stored_job)) = jobs_map_guard.get_mut(&task_job_id) { - stored_job.currently_running = false; - stored_job.current_session_id = None; - stored_job.process_start_time = None; - needs_persist = true; - } - } - - if needs_persist { - if let Err(e) = - persist_jobs_from_arc(&local_storage_path, ¤t_jobs_arc).await - { - tracing::error!( - "Failed to persist running status update for job {}: {}", - &task_job_id, - e - ); - } - } - - match result { - Ok(Ok(_session_id)) => { - tracing::info!( - "Scheduled job '{}' completed successfully", - &task_job_id - ); - } - Ok(Err(e)) => { - tracing::error!( - "Scheduled job '{}' execution failed: {}", - &e.job_id, - e.error - ); - } - Err(join_error) if join_error.is_cancelled() => { - tracing::info!("Scheduled job '{}' was cancelled/killed", &task_job_id); - } - Err(join_error) => { - tracing::error!( - "Scheduled job '{}' task failed: {}", - &task_job_id, - join_error - ); - } - } - }) - }) - .map_err(|e| SchedulerError::CronParseError(e.to_string()))?; + let job_uuid = match self.tokio_scheduler.add(cron_task).await { + Ok(uuid) => uuid, + Err(e) => { + tracing::error!( + "Failed to add job '{}' to scheduler: {}. 
Skipping.", + job_to_load.id, + e + ); + continue; + } + }; - let job_uuid = self - .internal_scheduler - .add(cron_task) - .await - .map_err(|e| SchedulerError::SchedulerInternalError(e.to_string()))?; + let mut jobs_guard = self.jobs.lock().await; jobs_guard.insert(job_to_load.id.clone(), (job_uuid, job_to_load)); } - Ok(()) - } - - // Renamed and kept for direct use when a guard is already held (e.g. add/remove) - async fn persist_jobs_to_storage_with_guard( - &self, - jobs_guard: &tokio::sync::MutexGuard<'_, JobsMap>, - ) -> Result<(), SchedulerError> { - let list: Vec = jobs_guard.values().map(|(_, j)| j.clone()).collect(); - if let Some(parent) = self.storage_path.parent() { - fs::create_dir_all(parent)?; - } - let data = serde_json::to_string_pretty(&list)?; - fs::write(&self.storage_path, data)?; - Ok(()) - } - - // New function that locks and calls the helper, for run_now and potentially other places - async fn persist_jobs(&self) -> Result<(), SchedulerError> { - persist_jobs_from_arc(&self.storage_path, &self.jobs).await } pub async fn list_scheduled_jobs(&self) -> Vec { @@ -619,23 +404,26 @@ impl Scheduler { } pub async fn remove_scheduled_job(&self, id: &str) -> Result<(), SchedulerError> { - let mut jobs_guard = self.jobs.lock().await; - if let Some((job_uuid, scheduled_job)) = jobs_guard.remove(id) { - self.internal_scheduler - .remove(&job_uuid) - .await - .map_err(|e| SchedulerError::SchedulerInternalError(e.to_string()))?; - - let recipe_path = Path::new(&scheduled_job.source); - if recipe_path.exists() { - fs::remove_file(recipe_path).map_err(SchedulerError::StorageError)?; + let (job_uuid, recipe_path) = { + let mut jobs_guard = self.jobs.lock().await; + match jobs_guard.remove(id) { + Some((uuid, job)) => (uuid, job.source.clone()), + None => return Err(SchedulerError::JobNotFound(id.to_string())), } + }; + + self.tokio_scheduler + .remove(&job_uuid) + .await + .map_err(|e| SchedulerError::SchedulerInternalError(e.to_string()))?; - self.persist_jobs_to_storage_with_guard(&jobs_guard).await?; - Ok(()) - } else { - Err(SchedulerError::JobNotFound(id.to_string())) + let path = Path::new(&recipe_path); + if path.exists() { + fs::remove_file(path)?; } + + persist_jobs(&self.storage_path, &self.jobs).await?; + Ok(()) } pub async fn sessions( @@ -647,129 +435,110 @@ impl Scheduler { .await .map_err(|e| SchedulerError::StorageError(io::Error::other(e)))?; - let mut schedule_sessions: Vec<(String, Session)> = Vec::new(); + let mut schedule_sessions: Vec<(String, Session)> = all_sessions + .into_iter() + .filter(|s| s.schedule_id.as_deref() == Some(sched_id)) + .map(|s| (s.id.clone(), s)) + .collect(); - for session in all_sessions { - if session.schedule_id.as_deref() == Some(sched_id) { - schedule_sessions.push((session.id.clone(), session)); - } - } - - // Sort by created_at timestamp, newest first schedule_sessions.sort_by(|a, b| b.1.created_at.cmp(&a.1.created_at)); + schedule_sessions.truncate(limit); - let result_sessions: Vec<(String, Session)> = - schedule_sessions.into_iter().take(limit).collect(); - - Ok(result_sessions) + Ok(schedule_sessions) } pub async fn run_now(&self, sched_id: &str) -> Result { - let job_to_run: ScheduledJob = { + let job_to_run = { let mut jobs_guard = self.jobs.lock().await; match jobs_guard.get_mut(sched_id) { - Some((_, job_def)) => { - // Set the currently_running flag before executing - job_def.currently_running = true; - let job_clone = job_def.clone(); - // Drop the guard before persisting to avoid borrow issues - drop(jobs_guard); - - 
// Persist the change immediately - self.persist_jobs().await?; - job_clone + Some((_, job)) => { + if job.currently_running { + return Err(SchedulerError::AnyhowError(anyhow!( + "Job '{}' is already running", + sched_id + ))); + } + job.currently_running = true; + job.process_start_time = Some(Utc::now()); + job.clone() } None => return Err(SchedulerError::JobNotFound(sched_id.to_string())), } }; - // Spawn the job execution as an abortable task for run_now - let job_task = tokio::spawn(run_scheduled_job_internal( - job_to_run.clone(), - None, - Some(self.jobs.clone()), - Some(sched_id.to_string()), - )); + persist_jobs(&self.storage_path, &self.jobs).await?; - // Store the abort handle for run_now jobs + let cancel_token = CancellationToken::new(); { - let mut running_tasks_guard = self.running_tasks.lock().await; - running_tasks_guard.insert(sched_id.to_string(), job_task.abort_handle()); + let mut tasks = self.running_tasks.lock().await; + tasks.insert(sched_id.to_string(), cancel_token.clone()); } - // Wait for the job to complete or be aborted - let run_result = job_task.await; + let result = execute_job( + job_to_run, + self.jobs.clone(), + sched_id.to_string(), + cancel_token.clone(), + ) + .await; - // Remove the abort handle { - let mut running_tasks_guard = self.running_tasks.lock().await; - running_tasks_guard.remove(sched_id); + let mut tasks = self.running_tasks.lock().await; + tasks.remove(sched_id); } - // Clear the currently_running flag after execution { let mut jobs_guard = self.jobs.lock().await; - if let Some((_tokio_job_id, job_in_map)) = jobs_guard.get_mut(sched_id) { - job_in_map.currently_running = false; - job_in_map.current_session_id = None; - job_in_map.process_start_time = None; - job_in_map.last_run = Some(Utc::now()); - } // MutexGuard is dropped here + if let Some((_, job)) = jobs_guard.get_mut(sched_id) { + job.currently_running = false; + job.current_session_id = None; + job.process_start_time = None; + job.last_run = Some(Utc::now()); + } } - // Persist after the lock is released and update is made. 
- self.persist_jobs().await?; + persist_jobs(&self.storage_path, &self.jobs).await?; - match run_result { - Ok(Ok(session_id)) => Ok(session_id), - Ok(Err(e)) => Err(SchedulerError::AnyhowError(anyhow!( - "Failed to execute job '{}' immediately: {}", + match result { + Ok(session_id) => Ok(session_id), + Err(e) => Err(SchedulerError::AnyhowError(anyhow!( + "Job '{}' failed: {}", sched_id, - e.error - ))), - Err(join_error) if join_error.is_cancelled() => { - tracing::info!("Run now job '{}' was cancelled/killed", sched_id); - Err(SchedulerError::AnyhowError(anyhow!( - "Job '{}' was successfully cancelled", - sched_id - ))) - } - Err(join_error) => Err(SchedulerError::AnyhowError(anyhow!( - "Failed to execute job '{}' immediately: {}", - sched_id, - join_error + e ))), } } pub async fn pause_schedule(&self, sched_id: &str) -> Result<(), SchedulerError> { - let mut jobs_guard = self.jobs.lock().await; - match jobs_guard.get_mut(sched_id) { - Some((_, job_def)) => { - if job_def.currently_running { - return Err(SchedulerError::AnyhowError(anyhow!( - "Cannot pause schedule '{}' while it's currently running", - sched_id - ))); + { + let mut jobs_guard = self.jobs.lock().await; + match jobs_guard.get_mut(sched_id) { + Some((_, job)) => { + if job.currently_running { + return Err(SchedulerError::AnyhowError(anyhow!( + "Cannot pause running schedule '{}'", + sched_id + ))); + } + job.paused = true; } - job_def.paused = true; - self.persist_jobs_to_storage_with_guard(&jobs_guard).await?; - Ok(()) + None => return Err(SchedulerError::JobNotFound(sched_id.to_string())), } - None => Err(SchedulerError::JobNotFound(sched_id.to_string())), } + + persist_jobs(&self.storage_path, &self.jobs).await } pub async fn unpause_schedule(&self, sched_id: &str) -> Result<(), SchedulerError> { - let mut jobs_guard = self.jobs.lock().await; - match jobs_guard.get_mut(sched_id) { - Some((_, job_def)) => { - job_def.paused = false; - self.persist_jobs_to_storage_with_guard(&jobs_guard).await?; - Ok(()) + { + let mut jobs_guard = self.jobs.lock().await; + match jobs_guard.get_mut(sched_id) { + Some((_, job)) => job.paused = false, + None => return Err(SchedulerError::JobNotFound(sched_id.to_string())), } - None => Err(SchedulerError::JobNotFound(sched_id.to_string())), } + + persist_jobs(&self.storage_path, &self.jobs).await } pub async fn update_schedule( @@ -777,242 +546,71 @@ impl Scheduler { sched_id: &str, new_cron: String, ) -> Result<(), SchedulerError> { - let mut jobs_guard = self.jobs.lock().await; - match jobs_guard.get_mut(sched_id) { - Some((job_uuid, job_def)) => { - if job_def.currently_running { - return Err(SchedulerError::AnyhowError(anyhow!( - "Cannot edit schedule '{}' while it's currently running", - sched_id - ))); + let (old_uuid, updated_job) = { + let mut jobs_guard = self.jobs.lock().await; + match jobs_guard.get_mut(sched_id) { + Some((uuid, job)) => { + if job.currently_running { + return Err(SchedulerError::AnyhowError(anyhow!( + "Cannot update running schedule '{}'", + sched_id + ))); + } + if new_cron == job.cron { + return Ok(()); + } + job.cron = new_cron.clone(); + (*uuid, job.clone()) } + None => return Err(SchedulerError::JobNotFound(sched_id.to_string())), + } + }; - if new_cron == job_def.cron { - // No change needed - return Ok(()); - } + self.tokio_scheduler + .remove(&old_uuid) + .await + .map_err(|e| SchedulerError::SchedulerInternalError(e.to_string()))?; - // Remove the old job from the scheduler - self.internal_scheduler - .remove(job_uuid) - .await - .map_err(|e| 
SchedulerError::SchedulerInternalError(e.to_string()))?; - - // Create new job with updated cron - let job_for_task = job_def.clone(); - let jobs_arc_for_task = self.jobs.clone(); - let storage_path_for_task = self.storage_path.clone(); - let running_tasks_for_task = self.running_tasks.clone(); - - tracing::info!( - "Updating job '{}' with new cron expression: '{}'", - sched_id, - new_cron - ); - let normalized_cron = normalize_cron_expression(&new_cron); - // Convert from 7-field (Temporal format) to 6-field (tokio-cron-scheduler format) - let tokio_cron = { - let parts: Vec<&str> = normalized_cron.split_whitespace().collect(); - if parts.len() == 7 { - parts[..6].join(" ") - } else { - normalized_cron.clone() - } - }; - if tokio_cron != new_cron { - tracing::info!( - "Converted cron expression from '{}' to '{}' for tokio-cron-scheduler", - new_cron, - tokio_cron - ); - } - let cron_task = Job::new_async(&tokio_cron, move |_uuid, _l| { - let task_job_id = job_for_task.id.clone(); - let current_jobs_arc = jobs_arc_for_task.clone(); - let local_storage_path = storage_path_for_task.clone(); - let job_to_execute = job_for_task.clone(); - let running_tasks_arc = running_tasks_for_task.clone(); - - Box::pin(async move { - // Check if the job is paused before executing - let should_execute = { - let jobs_map_guard = current_jobs_arc.lock().await; - if let Some((_, current_job_in_map)) = jobs_map_guard.get(&task_job_id) - { - !current_job_in_map.paused - } else { - false - } - }; - - if !should_execute { - tracing::info!("Skipping execution of paused job '{}'", &task_job_id); - return; - } - - let current_time = Utc::now(); - let mut needs_persist = false; - { - let mut jobs_map_guard = current_jobs_arc.lock().await; - if let Some((_, current_job_in_map)) = - jobs_map_guard.get_mut(&task_job_id) - { - current_job_in_map.last_run = Some(current_time); - current_job_in_map.currently_running = true; - current_job_in_map.process_start_time = Some(current_time); - needs_persist = true; - } - } - - if needs_persist { - if let Err(e) = - persist_jobs_from_arc(&local_storage_path, ¤t_jobs_arc).await - { - tracing::error!( - "Failed to persist last_run update for job {}: {}", - &task_job_id, - e - ); - } - } - - // Spawn the job execution as an abortable task - let job_task = tokio::spawn(run_scheduled_job_internal( - job_to_execute, - None, - Some(current_jobs_arc.clone()), - Some(task_job_id.clone()), - )); - - // Store the abort handle at the scheduler level - { - let mut running_tasks_guard = running_tasks_arc.lock().await; - running_tasks_guard - .insert(task_job_id.clone(), job_task.abort_handle()); - } - - // Wait for the job to complete or be aborted - let result = job_task.await; - - // Remove the abort handle - { - let mut running_tasks_guard = running_tasks_arc.lock().await; - running_tasks_guard.remove(&task_job_id); - } - - // Update the job status after execution - { - let mut jobs_map_guard = current_jobs_arc.lock().await; - if let Some((_, current_job_in_map)) = - jobs_map_guard.get_mut(&task_job_id) - { - current_job_in_map.currently_running = false; - current_job_in_map.current_session_id = None; - current_job_in_map.process_start_time = None; - needs_persist = true; - } - } - - if needs_persist { - if let Err(e) = - persist_jobs_from_arc(&local_storage_path, ¤t_jobs_arc).await - { - tracing::error!( - "Failed to persist running status update for job {}: {}", - &task_job_id, - e - ); - } - } - - match result { - Ok(Ok(_session_id)) => { - tracing::info!( - "Scheduled job '{}' completed 
successfully", - &task_job_id - ); - } - Ok(Err(e)) => { - tracing::error!( - "Scheduled job '{}' execution failed: {}", - &e.job_id, - e.error - ); - } - Err(join_error) if join_error.is_cancelled() => { - tracing::info!( - "Scheduled job '{}' was cancelled/killed", - &task_job_id - ); - } - Err(join_error) => { - tracing::error!( - "Scheduled job '{}' task failed: {}", - &task_job_id, - join_error - ); - } - } - }) - }) - .map_err(|e| SchedulerError::CronParseError(e.to_string()))?; - - let new_job_uuid = self - .internal_scheduler - .add(cron_task) - .await - .map_err(|e| SchedulerError::SchedulerInternalError(e.to_string()))?; - - // Update the job UUID and cron expression - *job_uuid = new_job_uuid; - job_def.cron = new_cron; - - self.persist_jobs_to_storage_with_guard(&jobs_guard).await?; - Ok(()) + let cron_task = self.create_cron_task(updated_job)?; + let new_uuid = self + .tokio_scheduler + .add(cron_task) + .await + .map_err(|e| SchedulerError::SchedulerInternalError(e.to_string()))?; + + { + let mut jobs_guard = self.jobs.lock().await; + if let Some((uuid, _)) = jobs_guard.get_mut(sched_id) { + *uuid = new_uuid; } - None => Err(SchedulerError::JobNotFound(sched_id.to_string())), } + + persist_jobs(&self.storage_path, &self.jobs).await } pub async fn kill_running_job(&self, sched_id: &str) -> Result<(), SchedulerError> { - let mut jobs_guard = self.jobs.lock().await; - match jobs_guard.get_mut(sched_id) { - Some((_, job_def)) => { - if !job_def.currently_running { + { + let jobs_guard = self.jobs.lock().await; + match jobs_guard.get(sched_id) { + Some((_, job)) if !job.currently_running => { return Err(SchedulerError::AnyhowError(anyhow!( - "Schedule '{}' is not currently running", + "Schedule '{}' is not running", sched_id ))); } + None => return Err(SchedulerError::JobNotFound(sched_id.to_string())), + _ => {} + } + } - tracing::info!("Killing running job '{}'", sched_id); - - // Abort the running task if it exists - { - let mut running_tasks_guard = self.running_tasks.lock().await; - if let Some(abort_handle) = running_tasks_guard.remove(sched_id) { - abort_handle.abort(); - tracing::info!("Aborted running task for job '{}'", sched_id); - } else { - tracing::warn!( - "No abort handle found for job '{}' in running tasks map", - sched_id - ); - } - } - - // Mark the job as no longer running - job_def.currently_running = false; - job_def.current_session_id = None; - job_def.process_start_time = None; - - self.persist_jobs_to_storage_with_guard(&jobs_guard).await?; - - tracing::info!("Successfully killed job '{}'", sched_id); - Ok(()) + { + let tasks = self.running_tasks.lock().await; + if let Some(token) = tasks.get(sched_id) { + token.cancel(); } - None => Err(SchedulerError::JobNotFound(sched_id.to_string())), } + + Ok(()) } pub async fn get_running_job_info( @@ -1021,180 +619,73 @@ impl Scheduler { ) -> Result)>, SchedulerError> { let jobs_guard = self.jobs.lock().await; match jobs_guard.get(sched_id) { - Some((_, job_def)) => { - if job_def.currently_running { - if let (Some(session_id), Some(start_time)) = - (&job_def.current_session_id, &job_def.process_start_time) - { - Ok(Some((session_id.clone(), *start_time))) - } else { - Ok(None) - } - } else { - Ok(None) + Some((_, job)) if job.currently_running => { + match (&job.current_session_id, &job.process_start_time) { + (Some(sid), Some(start)) => Ok(Some((sid.clone(), *start))), + _ => Ok(None), } } + Some(_) => Ok(None), None => Err(SchedulerError::JobNotFound(sched_id.to_string())), } } } -#[derive(Debug)] -struct 
JobExecutionError { - job_id: String, - error: String, -} - -async fn run_scheduled_job_internal( +async fn execute_job( job: ScheduledJob, - provider_override: Option>, - jobs_arc: Option>>, - job_id: Option, -) -> std::result::Result { - tracing::info!("Executing job: {} (Source: {})", job.id, job.source); + jobs: Arc>, + job_id: String, + cancel_token: CancellationToken, +) -> Result { + if job.source.is_empty() { + return Ok(job.id.to_string()); + } let recipe_path = Path::new(&job.source); - - let recipe_content = match fs::read_to_string(recipe_path) { - Ok(content) => content, - Err(e) => { - return Err(JobExecutionError { - job_id: job.id.clone(), - error: format!("Failed to load recipe file '{}': {}", job.source, e), - }); - } - }; + let recipe_content = fs::read_to_string(recipe_path)?; let recipe: Recipe = { let extension = recipe_path .extension() - .and_then(|os_str| os_str.to_str()) + .and_then(|s| s.to_str()) .unwrap_or("yaml") .to_lowercase(); match extension.as_str() { - "json" | "jsonl" => { - serde_json::from_str::(&recipe_content).map_err(|e| JobExecutionError { - job_id: job.id.clone(), - error: format!("Failed to parse JSON recipe '{}': {}", job.source, e), - }) - } - "yaml" | "yml" => { - serde_yaml::from_str::(&recipe_content).map_err(|e| JobExecutionError { - job_id: job.id.clone(), - error: format!("Failed to parse YAML recipe '{}': {}", job.source, e), - }) - } - _ => Err(JobExecutionError { - job_id: job.id.clone(), - error: format!( - "Unsupported recipe file extension '{}' for: {}", - extension, job.source - ), - }), + "json" | "jsonl" => serde_json::from_str(&recipe_content)?, + _ => serde_yaml::from_str(&recipe_content)?, } - }?; - - let agent: Agent = Agent::new(); - - let agent_provider: Arc; - - if let Some(provider) = provider_override { - agent_provider = provider; - } else { - let global_config = Config::global(); - let provider_name: String = match global_config.get_goose_provider() { - Ok(name) => name, - Err(_) => return Err(JobExecutionError { - job_id: job.id.clone(), - error: - "GOOSE_PROVIDER not configured globally. Run 'goose configure' or set env var." - .to_string(), - }), - }; - let model_name: String = - match global_config.get_goose_model() { - Ok(name) => name, - Err(_) => return Err(JobExecutionError { - job_id: job.id.clone(), - error: - "GOOSE_MODEL not configured globally. Run 'goose configure' or set env var." 
- .to_string(), - }), - }; - let model_config = - crate::model::ModelConfig::new(model_name.as_str()).map_err(|e| JobExecutionError { - job_id: job.id.clone(), - error: format!("Model config error: {}", e), - })?; - - agent_provider = - create(&provider_name, model_config) - .await - .map_err(|e| JobExecutionError { - job_id: job.id.clone(), - error: format!( - "Failed to create provider instance '{}': {}", - provider_name, e - ), - })?; - } + }; + + let agent = Agent::new(); - if let Some(ref recipe_extensions) = recipe.extensions { - for extension in recipe_extensions { - agent - .add_extension(extension.clone()) - .await - .map_err(|e| JobExecutionError { - job_id: job.id.clone(), - error: format!("Failed to add extension '{}': {}", extension.name(), e), - })?; + let config = Config::global(); + let provider_name = config.get_goose_provider()?; + let model_name = config.get_goose_model()?; + let model_config = crate::model::ModelConfig::new(&model_name)?; + + let agent_provider = create(&provider_name, model_config).await?; + + if let Some(ref extensions) = recipe.extensions { + for ext in extensions { + agent.add_extension(ext.clone()).await?; } } - if let Err(e) = agent.update_provider(agent_provider).await { - return Err(JobExecutionError { - job_id: job.id.clone(), - error: format!("Failed to set provider on agent: {}", e), - }); - } - tracing::info!("Agent configured with provider for job '{}'", job.id); - - let current_dir = match std::env::current_dir() { - Ok(cd) => cd, - Err(e) => { - return Err(JobExecutionError { - job_id: job.id.clone(), - error: format!("Failed to get current directory for job execution: {}", e), - }); - } - }; + agent.update_provider(agent_provider).await?; - let session = match SessionManager::create_session( - current_dir.clone(), + let session = SessionManager::create_session( + std::env::current_dir()?, format!("Scheduled job: {}", job.id), SessionType::Scheduled, ) - .await - { - Ok(s) => s, - Err(e) => { - return Err(JobExecutionError { - job_id: job.id.clone(), - error: format!("Failed to create session: {}", e), - }); - } - }; + .await?; - // Update the job with the session ID if we have access to the jobs arc - if let (Some(jobs_arc), Some(job_id_str)) = (jobs_arc.as_ref(), job_id.as_ref()) { - let mut jobs_guard = jobs_arc.lock().await; - if let Some((_, job_def)) = jobs_guard.get_mut(job_id_str) { - job_def.current_session_id = Some(session.id.clone()); - } + let mut jobs_guard = jobs.lock().await; + if let Some((_, job_def)) = jobs_guard.get_mut(job_id.as_str()) { + job_def.current_session_id = Some(session.id.clone()); } - // Use prompt if available, otherwise fall back to instructions let prompt_text = recipe .prompt .as_ref() @@ -1211,46 +702,32 @@ async fn run_scheduled_job_internal( retry_config: None, }; - let session_id = Some(session_config.id.clone()); - match crate::session_context::with_session_id(session_id, async { - agent.reply(user_message, session_config, None).await + let session_id = session_config.id.clone(); + let stream = crate::session_context::with_session_id(Some(session_id.clone()), async { + agent + .reply(user_message, session_config, Some(cancel_token)) + .await }) - .await - { - Ok(mut stream) => { - use futures::StreamExt; - - while let Some(message_result) = stream.next().await { - tokio::task::yield_now().await; - - match message_result { - Ok(AgentEvent::Message(msg)) => { - if msg.role == rmcp::model::Role::Assistant { - tracing::info!("[Job {}] Assistant: {:?}", job.id, msg.content); - } - 
conversation.push(msg); - } - Ok(AgentEvent::McpNotification(_)) => {} - Ok(AgentEvent::ModelChange { .. }) => {} - Ok(AgentEvent::HistoryReplaced(updated_conversation)) => { - conversation = updated_conversation; - } - Err(e) => { - tracing::error!( - "[Job {}] Error receiving message from agent: {}", - job.id, - e - ); - break; - } - } + .await?; + + use futures::StreamExt; + let mut stream = std::pin::pin!(stream); + + while let Some(message_result) = stream.next().await { + tokio::task::yield_now().await; + + match message_result { + Ok(AgentEvent::Message(msg)) => { + conversation.push(msg); + } + Ok(AgentEvent::HistoryReplaced(updated)) => { + conversation = updated; + } + Ok(_) => {} + Err(e) => { + tracing::error!("Error in agent stream: {}", e); + break; } - } - Err(e) => { - return Err(JobExecutionError { - job_id: job.id.clone(), - error: format!("Agent failed to reply for recipe '{}': {}", job.source, e), - }); } } @@ -1260,10 +737,9 @@ async fn run_scheduled_job_internal( .apply() .await { - tracing::error!("[Job {}] Failed to update session metadata: {}", job.id, e); + tracing::error!("Failed to update session: {}", e); } - tracing::info!("Finished job: {}", job.id); Ok(session.id) } @@ -1273,8 +749,8 @@ impl SchedulerTrait for Scheduler { self.add_scheduled_job(job).await } - async fn list_scheduled_jobs(&self) -> Result, SchedulerError> { - Ok(self.list_scheduled_jobs().await) + async fn list_scheduled_jobs(&self) -> Vec { + self.list_scheduled_jobs().await } async fn remove_scheduled_job(&self, id: &str) -> Result<(), SchedulerError> { @@ -1324,125 +800,51 @@ impl SchedulerTrait for Scheduler { #[cfg(test)] mod tests { use super::*; - use crate::recipe::Recipe; - use crate::{ - model::ModelConfig, - providers::base::{ProviderMetadata, ProviderUsage, Usage}, - providers::errors::ProviderError, - }; - use rmcp::model::Tool; - use rmcp::model::{AnnotateAble, RawTextContent, Role}; - - use crate::conversation::message::{Message, MessageContent}; - use std::env; - use std::fs::{self, File}; - use std::io::Write; use tempfile::tempdir; + use tokio::time::{sleep, Duration}; - #[derive(Clone)] - struct MockSchedulerTestProvider { - model_config: ModelConfig, + fn create_test_recipe(dir: &Path, name: &str) -> PathBuf { + let recipe_path = dir.join(format!("{}.yaml", name)); + fs::write(&recipe_path, "prompt: test\n").unwrap(); + recipe_path } - #[async_trait::async_trait] - impl GooseProvider for MockSchedulerTestProvider { - fn metadata() -> ProviderMetadata { - ProviderMetadata::new( - "mock-scheduler-test", - "Mock for Scheduler Test", - "A mock provider for scheduler tests", // description - "test-model", // default_model - vec!["test-model"], // model_names - "", // model_doc_link (empty string if not applicable) - vec![], // config_keys (empty vec if none) - ) - } - - fn get_name(&self) -> &str { - "mock-scheduler" - } - - fn get_model_config(&self) -> ModelConfig { - self.model_config.clone() - } + #[tokio::test] + async fn test_job_runs_on_schedule() { + let temp_dir = tempdir().unwrap(); + let storage_path = temp_dir.path().join("schedules.json"); + let recipe_path = create_test_recipe(temp_dir.path(), "scheduled_job"); + let scheduler = Scheduler::new(storage_path).await.unwrap(); + + let job = ScheduledJob { + id: "scheduled_job".to_string(), + source: recipe_path.to_string_lossy().to_string(), + cron: "* * * * * *".to_string(), + last_run: None, + currently_running: false, + paused: false, + current_session_id: None, + process_start_time: None, + }; - async fn 
complete_with_model( - &self, - _model_config: &ModelConfig, - _system: &str, - _messages: &[Message], - _tools: &[Tool], - ) -> Result<(Message, ProviderUsage), ProviderError> { - Ok(( - Message::new( - Role::Assistant, - Utc::now().timestamp(), - vec![MessageContent::Text( - RawTextContent { - text: "Mocked scheduled response".to_string(), - meta: None, - } - .no_annotation(), - )], - ), - ProviderUsage::new("mock-scheduler-test".to_string(), Usage::default()), - )) - } - } + scheduler.add_scheduled_job(job).await.unwrap(); + sleep(Duration::from_millis(1500)).await; - // This function is pub(super) making it visible to run_scheduled_job_internal (parent module) - // when cfg(test) is active for the whole compilation unit. - pub(super) fn create_scheduler_test_mock_provider( - model_config: ModelConfig, - ) -> Arc { - Arc::new(MockSchedulerTestProvider { model_config }) + let jobs = scheduler.list_scheduled_jobs().await; + assert!(jobs[0].last_run.is_some(), "Job should have run"); } #[tokio::test] - async fn test_scheduled_session_has_schedule_id() -> Result<(), Box> { - // Set environment variables for the test - env::set_var("GOOSE_PROVIDER", "test_provider"); - env::set_var("GOOSE_MODEL", "test_model"); - - let temp_dir = tempdir()?; - let recipe_dir = temp_dir.path().join("recipes_for_test_scheduler"); - fs::create_dir_all(&recipe_dir)?; - - let _ = crate::session::session_manager::ensure_session_dir() - .expect("Failed to ensure app session dir"); - - let schedule_id_str = "test_schedule_001_scheduler_check".to_string(); - let recipe_filename = recipe_dir.join(format!("{}.json", schedule_id_str)); - - let dummy_recipe = Recipe { - version: "1.0.0".to_string(), - title: "Test Schedule ID Recipe".to_string(), - description: "A recipe for testing schedule_id propagation.".to_string(), - instructions: None, - prompt: Some("This is a test prompt for a scheduled job.".to_string()), - extensions: None, - - activities: None, - author: None, - parameters: None, - settings: None, - response: None, - sub_recipes: None, - retry: None, - }; - let mut recipe_file = File::create(&recipe_filename)?; - writeln!( - recipe_file, - "{}", - serde_json::to_string_pretty(&dummy_recipe)? 
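test_job_runs_on_schedule above (and the paused-job test that follows) waits a fixed 1.5 s for the every-second cron to tick. If that turns out to be flaky on slow CI, a bounded polling loop keeps the same structure without guessing the delay. Purely a suggestion, not part of this change; wait_for_first_run is a hypothetical helper and assumes the Scheduler type from this module is in scope:

```rust
use std::time::Duration;
use tokio::time::{sleep, Instant};

// Poll until the first job reports a last_run timestamp, instead of assuming
// a single fixed sleep is always long enough.
async fn wait_for_first_run(scheduler: &Scheduler, max_wait: Duration) {
    let deadline = Instant::now() + max_wait;
    loop {
        let jobs = scheduler.list_scheduled_jobs().await;
        if jobs.first().is_some_and(|job| job.last_run.is_some()) {
            return;
        }
        assert!(Instant::now() < deadline, "job never ran within {max_wait:?}");
        sleep(Duration::from_millis(100)).await;
    }
}
```

The paused-job test would still need a plain sleep, since it asserts that nothing happened inside the window.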
- )?; - recipe_file.flush()?; - drop(recipe_file); - - let dummy_job = ScheduledJob { - id: schedule_id_str.clone(), - source: recipe_filename.to_string_lossy().into_owned(), - cron: "* * * * * * ".to_string(), // Runs every second for quick testing + async fn test_paused_job_does_not_run() { + let temp_dir = tempdir().unwrap(); + let storage_path = temp_dir.path().join("schedules.json"); + let recipe_path = create_test_recipe(temp_dir.path(), "paused_job"); + let scheduler = Scheduler::new(storage_path).await.unwrap(); + + let job = ScheduledJob { + id: "paused_job".to_string(), + source: recipe_path.to_string_lossy().to_string(), + cron: "* * * * * *".to_string(), last_run: None, currently_running: false, paused: false, @@ -1450,46 +852,11 @@ mod tests { process_start_time: None, }; - let mock_model_config = ModelConfig::new_or_fail("test_model"); - let mock_provider_instance = create_scheduler_test_mock_provider(mock_model_config); - - // Call run_scheduled_job_internal, passing the mock provider - let created_session_id = - run_scheduled_job_internal(dummy_job.clone(), Some(mock_provider_instance), None, None) - .await - .expect("run_scheduled_job_internal failed"); - - let session = SessionManager::get_session(&created_session_id, true).await?; - let schedule_id = session.schedule_id.clone(); - - assert_eq!( - schedule_id, - Some(schedule_id_str.clone()), - "Session metadata schedule_id ({:?}) does not match the job ID ({}). Session: {}", - schedule_id, - schedule_id_str, - created_session_id - ); - - // Check if messages were written using SessionManager - let messages_in_session = session.conversation.unwrap_or_default(); - assert!( - !messages_in_session.is_empty(), - "No messages were written to the session: {}", - created_session_id - ); - // We expect at least a user prompt and an assistant response - assert!( - messages_in_session.len() >= 2, - "Expected at least 2 messages (prompt + response), found {} in session: {}", - messages_in_session.len(), - created_session_id - ); + scheduler.add_scheduled_job(job).await.unwrap(); + scheduler.pause_schedule("paused_job").await.unwrap(); + sleep(Duration::from_millis(1500)).await; - // Clean up environment variables - env::remove_var("GOOSE_PROVIDER"); - env::remove_var("GOOSE_MODEL"); - - Ok(()) + let jobs = scheduler.list_scheduled_jobs().await; + assert!(jobs[0].last_run.is_none(), "Paused job should not run"); } } diff --git a/crates/goose/src/scheduler_factory.rs b/crates/goose/src/scheduler_factory.rs deleted file mode 100644 index cba51d4aa85e..000000000000 --- a/crates/goose/src/scheduler_factory.rs +++ /dev/null @@ -1,26 +0,0 @@ -use std::path::PathBuf; -use std::sync::Arc; - -use crate::scheduler::{Scheduler, SchedulerError}; -use crate::scheduler_trait::SchedulerTrait; - -/// Factory for creating scheduler instances -pub struct SchedulerFactory; - -impl SchedulerFactory { - /// Create a scheduler instance - pub async fn create(storage_path: PathBuf) -> Result, SchedulerError> { - tracing::info!("Creating scheduler"); - let scheduler = Scheduler::new(storage_path).await?; - Ok(scheduler as Arc) - } - - /// Create a scheduler (for testing or explicit use) - pub async fn create_legacy( - storage_path: PathBuf, - ) -> Result, SchedulerError> { - tracing::info!("Creating scheduler (explicit)"); - let scheduler = Scheduler::new(storage_path).await?; - Ok(scheduler as Arc) - } -} diff --git a/crates/goose/src/scheduler_trait.rs b/crates/goose/src/scheduler_trait.rs index c4ef864576d9..019dcf2c7ea0 100644 --- 
a/crates/goose/src/scheduler_trait.rs +++ b/crates/goose/src/scheduler_trait.rs @@ -4,42 +4,22 @@ use chrono::{DateTime, Utc}; use crate::scheduler::{ScheduledJob, SchedulerError}; use crate::session::Session; -/// Common trait for all scheduler implementations #[async_trait] pub trait SchedulerTrait: Send + Sync { - /// Add a new scheduled job async fn add_scheduled_job(&self, job: ScheduledJob) -> Result<(), SchedulerError>; - - /// List all scheduled jobs - async fn list_scheduled_jobs(&self) -> Result, SchedulerError>; - - /// Remove a scheduled job by ID + async fn list_scheduled_jobs(&self) -> Vec; async fn remove_scheduled_job(&self, id: &str) -> Result<(), SchedulerError>; - - /// Pause a scheduled job async fn pause_schedule(&self, id: &str) -> Result<(), SchedulerError>; - - /// Unpause a scheduled job async fn unpause_schedule(&self, id: &str) -> Result<(), SchedulerError>; - - /// Run a job immediately async fn run_now(&self, id: &str) -> Result; - - /// Get sessions for a scheduled job async fn sessions( &self, sched_id: &str, limit: usize, ) -> Result, SchedulerError>; - - /// Update a schedule's cron expression async fn update_schedule(&self, sched_id: &str, new_cron: String) -> Result<(), SchedulerError>; - - /// Kill a running job async fn kill_running_job(&self, sched_id: &str) -> Result<(), SchedulerError>; - - /// Get information about a running job async fn get_running_job_info( &self, sched_id: &str, diff --git a/crates/goose/tests/agent.rs b/crates/goose/tests/agent.rs index 7478119027fa..5a4389a4dea6 100644 --- a/crates/goose/tests/agent.rs +++ b/crates/goose/tests/agent.rs @@ -40,9 +40,9 @@ mod tests { Ok(()) } - async fn list_scheduled_jobs(&self) -> Result, SchedulerError> { + async fn list_scheduled_jobs(&self) -> Vec { let jobs = self.jobs.lock().await; - Ok(jobs.clone()) + jobs.clone() } async fn remove_scheduled_job(&self, id: &str) -> Result<(), SchedulerError> { diff --git a/crates/goose/tests/test_support.rs b/crates/goose/tests/test_support.rs deleted file mode 100644 index 49dd89d885b7..000000000000 --- a/crates/goose/tests/test_support.rs +++ /dev/null @@ -1,401 +0,0 @@ -#![cfg(test)] - -use std::collections::{HashMap, HashSet}; -use std::path::PathBuf; -use std::sync::Arc; - -use async_trait::async_trait; -use chrono::{DateTime, Utc}; -use tempfile::TempDir; -use tokio::sync::Mutex; - -use goose::agents::Agent; -use goose::scheduler::{ScheduledJob, SchedulerError}; -use goose::scheduler_trait::SchedulerTrait; -use goose::session::Session; - -#[derive(Debug, Clone)] -pub enum MockBehavior { - Success, - NotFound(String), - AlreadyExists(String), - InternalError(String), - JobCurrentlyRunning(String), -} - -#[derive(Clone)] -pub struct ConfigurableMockScheduler { - jobs: Arc>>, - running_jobs: Arc>>, - call_log: Arc>>, - behaviors: Arc>>, - #[allow(clippy::type_complexity)] - sessions_data: Arc>>>, -} - -#[allow(dead_code)] -impl Default for ConfigurableMockScheduler { - fn default() -> Self { - Self::new() - } -} - -impl ConfigurableMockScheduler { - pub fn new() -> Self { - Self { - jobs: Arc::new(Mutex::new(HashMap::new())), - running_jobs: Arc::new(Mutex::new(HashSet::new())), - call_log: Arc::new(Mutex::new(Vec::new())), - behaviors: Arc::new(Mutex::new(HashMap::new())), - sessions_data: Arc::new(Mutex::new(HashMap::new())), - } - } - - pub async fn get_calls(&self) -> Vec { - self.call_log.lock().await.clone() - } - - async fn log_call(&self, method: &str) { - self.call_log.lock().await.push(method.to_string()); - } - - async fn get_behavior(&self, 
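With scheduler_factory.rs deleted and the trait trimmed down, the notable signature change is that list_scheduled_jobs returns Vec<ScheduledJob> directly, with no Result to unwrap. Callers that want to stay swappable, for example to use the in-memory mock from tests/agent.rs, can still code purely against the trait object. A small sketch; describe_jobs is an illustrative name, not something in the crate:

```rust
use std::sync::Arc;

use goose::scheduler::ScheduledJob;
use goose::scheduler_trait::SchedulerTrait;

// Works against the real Scheduler as well as any test double; the list call
// is now infallible, so there is no error branch to handle here.
async fn describe_jobs(scheduler: Arc<dyn SchedulerTrait>) -> String {
    let jobs: Vec<ScheduledJob> = scheduler.list_scheduled_jobs().await;
    jobs.iter()
        .map(|job| {
            let state = if job.paused { " [paused]" } else { "" };
            format!("{} ({}){}", job.id, job.cron, state)
        })
        .collect::<Vec<_>>()
        .join("\n")
}
```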
method: &str) -> MockBehavior { - self.behaviors - .lock() - .await - .get(method) - .cloned() - .unwrap_or(MockBehavior::Success) - } -} - -#[async_trait] -impl SchedulerTrait for ConfigurableMockScheduler { - async fn add_scheduled_job(&self, job: ScheduledJob) -> Result<(), SchedulerError> { - self.log_call("add_scheduled_job").await; - - match self.get_behavior("add_scheduled_job").await { - MockBehavior::Success => { - let mut jobs = self.jobs.lock().await; - if jobs.contains_key(&job.id) { - return Err(SchedulerError::JobIdExists(job.id)); - } - jobs.insert(job.id.clone(), job); - Ok(()) - } - MockBehavior::AlreadyExists(id) => Err(SchedulerError::JobIdExists(id)), - MockBehavior::InternalError(msg) => Err(SchedulerError::SchedulerInternalError(msg)), - _ => Ok(()), - } - } - - async fn list_scheduled_jobs(&self) -> Result, SchedulerError> { - self.log_call("list_scheduled_jobs").await; - - match self.get_behavior("list_scheduled_jobs").await { - MockBehavior::Success => { - let jobs = self.jobs.lock().await; - Ok(jobs.values().cloned().collect()) - } - MockBehavior::InternalError(msg) => Err(SchedulerError::SchedulerInternalError(msg)), - _ => Ok(vec![]), - } - } - - async fn remove_scheduled_job(&self, id: &str) -> Result<(), SchedulerError> { - self.log_call("remove_scheduled_job").await; - - match self.get_behavior("remove_scheduled_job").await { - MockBehavior::Success => { - let mut jobs = self.jobs.lock().await; - if jobs.remove(id).is_some() { - Ok(()) - } else { - Err(SchedulerError::JobNotFound(id.to_string())) - } - } - MockBehavior::NotFound(job_id) => Err(SchedulerError::JobNotFound(job_id)), - MockBehavior::InternalError(msg) => Err(SchedulerError::SchedulerInternalError(msg)), - _ => Ok(()), - } - } - - async fn pause_schedule(&self, id: &str) -> Result<(), SchedulerError> { - self.log_call("pause_schedule").await; - - match self.get_behavior("pause_schedule").await { - MockBehavior::Success => { - let jobs = self.jobs.lock().await; - if jobs.contains_key(id) { - Ok(()) - } else { - Err(SchedulerError::JobNotFound(id.to_string())) - } - } - MockBehavior::NotFound(job_id) => Err(SchedulerError::JobNotFound(job_id)), - MockBehavior::JobCurrentlyRunning(job_id) => { - Err(SchedulerError::AnyhowError(anyhow::anyhow!( - "Cannot pause schedule '{}' while it's currently running", - job_id - ))) - } - MockBehavior::InternalError(msg) => Err(SchedulerError::SchedulerInternalError(msg)), - _ => Ok(()), - } - } - - async fn unpause_schedule(&self, id: &str) -> Result<(), SchedulerError> { - self.log_call("unpause_schedule").await; - - match self.get_behavior("unpause_schedule").await { - MockBehavior::Success => { - let jobs = self.jobs.lock().await; - if jobs.contains_key(id) { - Ok(()) - } else { - Err(SchedulerError::JobNotFound(id.to_string())) - } - } - MockBehavior::NotFound(job_id) => Err(SchedulerError::JobNotFound(job_id)), - MockBehavior::InternalError(msg) => Err(SchedulerError::SchedulerInternalError(msg)), - _ => Ok(()), - } - } - - async fn run_now(&self, id: &str) -> Result { - self.log_call("run_now").await; - - match self.get_behavior("run_now").await { - MockBehavior::Success => { - let jobs = self.jobs.lock().await; - if jobs.contains_key(id) { - Ok(format!("{}_session_{}", id, chrono::Utc::now().timestamp())) - } else { - Err(SchedulerError::JobNotFound(id.to_string())) - } - } - MockBehavior::NotFound(job_id) => Err(SchedulerError::JobNotFound(job_id)), - MockBehavior::InternalError(msg) => Err(SchedulerError::SchedulerInternalError(msg)), - _ => 
Ok("mock_session_123".to_string()), - } - } - - async fn sessions( - &self, - sched_id: &str, - limit: usize, - ) -> Result, SchedulerError> { - self.log_call("sessions").await; - - match self.get_behavior("sessions").await { - MockBehavior::Success => { - let sessions_data = self.sessions_data.lock().await; - let sessions = sessions_data.get(sched_id).cloned().unwrap_or_default(); - Ok(sessions.into_iter().take(limit).collect()) - } - MockBehavior::NotFound(job_id) => Err(SchedulerError::JobNotFound(job_id)), - MockBehavior::InternalError(msg) => Err(SchedulerError::SchedulerInternalError(msg)), - _ => Ok(vec![]), - } - } - - async fn update_schedule( - &self, - sched_id: &str, - _new_cron: String, - ) -> Result<(), SchedulerError> { - self.log_call("update_schedule").await; - - match self.get_behavior("update_schedule").await { - MockBehavior::Success => { - let jobs = self.jobs.lock().await; - if jobs.contains_key(sched_id) { - Ok(()) - } else { - Err(SchedulerError::JobNotFound(sched_id.to_string())) - } - } - MockBehavior::NotFound(job_id) => Err(SchedulerError::JobNotFound(job_id)), - MockBehavior::InternalError(msg) => Err(SchedulerError::SchedulerInternalError(msg)), - _ => Ok(()), - } - } - - async fn kill_running_job(&self, sched_id: &str) -> Result<(), SchedulerError> { - self.log_call("kill_running_job").await; - - match self.get_behavior("kill_running_job").await { - MockBehavior::Success => { - let running_jobs = self.running_jobs.lock().await; - if running_jobs.contains(sched_id) { - Ok(()) - } else { - Err(SchedulerError::AnyhowError(anyhow::anyhow!( - "Schedule '{}' is not currently running", - sched_id - ))) - } - } - MockBehavior::NotFound(job_id) => Err(SchedulerError::JobNotFound(job_id)), - MockBehavior::InternalError(msg) => Err(SchedulerError::SchedulerInternalError(msg)), - _ => Ok(()), - } - } - - async fn get_running_job_info( - &self, - sched_id: &str, - ) -> Result)>, SchedulerError> { - self.log_call("get_running_job_info").await; - - match self.get_behavior("get_running_job_info").await { - MockBehavior::Success => { - let running_jobs = self.running_jobs.lock().await; - if running_jobs.contains(sched_id) { - Ok(Some((format!("{}_session", sched_id), Utc::now()))) - } else { - Ok(None) - } - } - MockBehavior::NotFound(job_id) => Err(SchedulerError::JobNotFound(job_id)), - MockBehavior::InternalError(msg) => Err(SchedulerError::SchedulerInternalError(msg)), - _ => Ok(None), - } - } -} - -// Helper for creating temp recipe files -pub struct TempRecipe { - pub path: PathBuf, - _temp_dir: TempDir, // Keep alive -} - -pub fn create_temp_recipe(valid: bool, format: &str) -> TempRecipe { - let temp_dir = tempfile::tempdir().unwrap(); - let filename = format!("test_recipe.{}", format); - let path = temp_dir.path().join(filename); - - let content = if valid { - match format { - "json" => { - r#"{ - "version": "1.0.0", - "title": "Test Recipe", - "description": "A test recipe", - "prompt": "Hello world" -}"# - } - "yaml" | "yml" => { - r#"version: "1.0.0" -title: "Test Recipe" -description: "A test recipe" -prompt: "Hello world" -"# - } - _ => panic!("Unsupported format: {}", format), - } - } else { - match format { - "json" => r#"{"invalid": json syntax"#, - "yaml" | "yml" => "invalid:\n - yaml: syntax: error", - _ => "invalid content", - } - }; - - std::fs::write(&path, content).unwrap(); - TempRecipe { - path, - _temp_dir: temp_dir, - } -} - -// Test builder for easy setup -pub struct ScheduleToolTestBuilder { - scheduler: Arc, -} - -impl Default for 
ScheduleToolTestBuilder { - fn default() -> Self { - Self::new() - } -} - -impl ScheduleToolTestBuilder { - pub fn new() -> Self { - Self { - scheduler: Arc::new(ConfigurableMockScheduler::new()), - } - } - - pub async fn with_scheduler_behavior(self, method: &str, behavior: MockBehavior) -> Self { - { - let mut behaviors = self.scheduler.behaviors.lock().await; - behaviors.insert(method.to_string(), behavior); - } - self - } - - pub async fn with_existing_job(self, job_id: &str, cron: &str) -> Self { - let job = ScheduledJob { - id: job_id.to_string(), - source: "/tmp/test.json".to_string(), - cron: cron.to_string(), - last_run: None, - currently_running: false, - paused: false, - current_session_id: None, - process_start_time: None, - }; - { - let mut jobs = self.scheduler.jobs.lock().await; - jobs.insert(job.id.clone(), job); - } - self - } - - pub async fn with_running_job(self, job_id: &str) -> Self { - { - let mut running_jobs = self.scheduler.running_jobs.lock().await; - running_jobs.insert(job_id.to_string()); - } - self - } - - pub async fn with_sessions_data(self, job_id: &str, sessions: Vec<(String, Session)>) -> Self { - { - let mut sessions_data = self.scheduler.sessions_data.lock().await; - sessions_data.insert(job_id.to_string(), sessions); - } - self - } - - pub async fn build(self) -> (Agent, Arc) { - let agent = Agent::new(); - agent.set_scheduler(self.scheduler.clone()).await; - (agent, self.scheduler) - } -} - -pub fn create_test_session_metadata(message_count: usize, working_dir: &str) -> Session { - Session { - id: "".to_string(), - working_dir: PathBuf::from(working_dir), - name: "Test session".to_string(), - user_set_name: false, - created_at: Default::default(), - schedule_id: Some("test_job".to_string()), - recipe: None, - total_tokens: Some(100), - input_tokens: Some(50), - output_tokens: Some(50), - accumulated_total_tokens: Some(100), - accumulated_input_tokens: Some(50), - accumulated_output_tokens: Some(50), - extension_data: Default::default(), - updated_at: Default::default(), - conversation: None, - message_count, - user_recipe_values: None, - session_type: Default::default(), - } -} diff --git a/ui/desktop/openapi.json b/ui/desktop/openapi.json index 112e24381885..fd45570d58bf 100644 --- a/ui/desktop/openapi.json +++ b/ui/desktop/openapi.json @@ -1672,10 +1672,9 @@ { "name": "limit", "in": "query", - "required": false, + "required": true, "schema": { "type": "integer", - "format": "int32", "minimum": 0 } } @@ -2353,10 +2352,6 @@ "cron": { "type": "string" }, - "execution_mode": { - "type": "string", - "nullable": true - }, "id": { "type": "string" }, @@ -4400,10 +4395,12 @@ }, "SessionsQuery": { "type": "object", + "required": [ + "limit" + ], "properties": { "limit": { "type": "integer", - "format": "int32", "minimum": 0 } } diff --git a/ui/desktop/src/api/types.gen.ts b/ui/desktop/src/api/types.gen.ts index 4826182d1b54..75de116e5690 100644 --- a/ui/desktop/src/api/types.gen.ts +++ b/ui/desktop/src/api/types.gen.ts @@ -90,7 +90,6 @@ export type CreateRecipeResponse = { export type CreateScheduleRequest = { cron: string; - execution_mode?: string | null; id: string; recipe_source: string; }; @@ -732,7 +731,7 @@ export type SessionListResponse = { export type SessionType = 'user' | 'scheduled' | 'sub_agent' | 'hidden'; export type SessionsQuery = { - limit?: number; + limit: number; }; export type SetProviderRequest = { @@ -2209,8 +2208,8 @@ export type SessionsHandlerData = { */ id: string; }; - query?: { - limit?: number; + query: { + limit: number; }; 
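The regenerated openapi.json and types.gen.ts above make `limit` a required query parameter and drop the int32 format, matching an unsigned size on the server. In serde terms, a field without a default simply fails to deserialize when it is missing, so requests without `?limit=` are rejected rather than silently falling back. A stand-alone sketch of that behaviour; SessionsQueryDemo is an illustrative mirror of the query shape, not the server's actual struct:

```rust
use serde::Deserialize;

// Illustrative mirror of the sessions query; not the server's definition.
#[derive(Debug, Deserialize)]
struct SessionsQueryDemo {
    limit: usize,
}

fn main() {
    // With `limit` present, the query string parses as expected...
    let ok: SessionsQueryDemo = serde_urlencoded::from_str("limit=20").unwrap();
    assert_eq!(ok.limit, 20);

    // ...and without it, deserialization fails instead of applying a default.
    let missing: Result<SessionsQueryDemo, _> = serde_urlencoded::from_str("");
    assert!(missing.is_err());
}
```

On the client this shows up as query.limit becoming non-optional in SessionsHandlerData, so every caller has to pass an explicit page size.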
url: '/schedule/{id}/sessions'; }; diff --git a/ui/desktop/src/components/schedule/CreateScheduleModal.tsx b/ui/desktop/src/components/schedule/CreateScheduleModal.tsx deleted file mode 100644 index a9d9f1c3fd80..000000000000 --- a/ui/desktop/src/components/schedule/CreateScheduleModal.tsx +++ /dev/null @@ -1,894 +0,0 @@ -import React, { useState, useEffect, FormEvent, useCallback } from 'react'; -import { Card } from '../ui/card'; -import { Button } from '../ui/button'; -import { Input } from '../ui/input'; -import { Select } from '../ui/Select'; -import cronstrue from 'cronstrue'; -import * as yaml from 'yaml'; -import { Recipe, decodeRecipe } from '../../recipe'; -import { getStorageDirectory } from '../../recipe/recipe_management'; -import ClockIcon from '../../assets/clock-icon.svg'; - -type FrequencyValue = 'once' | 'every' | 'daily' | 'weekly' | 'monthly'; - -type CustomIntervalUnit = 'minute' | 'hour' | 'day'; - -interface FrequencyOption { - value: FrequencyValue; - label: string; -} - -export interface NewSchedulePayload { - id: string; - recipe_source: string; - cron: string; - execution_mode?: string; -} - -interface CreateScheduleModalProps { - isOpen: boolean; - onClose: () => void; - onSubmit: (payload: NewSchedulePayload) => Promise; - isLoadingExternally: boolean; - apiErrorExternally: string | null; - initialDeepLink?: string | null; -} - -// Interface for clean extension in YAML -interface CleanExtension { - name: string; - type: 'stdio' | 'sse' | 'builtin' | 'frontend' | 'streamable_http'; - cmd?: string; - args?: string[]; - uri?: string; - display_name?: string; - tools?: unknown[]; - instructions?: string; - env_keys?: string[]; - timeout?: number; - description?: string; - bundled?: boolean; -} - -// TODO: This 'Recipe' interface should be converted to match the OpenAPI spec for Recipe -// once we have separated the recipe from the schedule in the frontend. -// Interface for clean recipe in YAML -interface CleanRecipe { - title: string; - description: string; - instructions?: string; - prompt?: string; - activities?: string[]; - extensions?: CleanExtension[]; - author?: { - contact?: string; - metadata?: string; - }; - schedule?: { - foreground: boolean; - fallback_to_background: boolean; - window_title?: string; - working_directory?: string; - }; -} - -const frequencies: FrequencyOption[] = [ - { value: 'once', label: 'Once' }, - { value: 'every', label: 'Every...' 
}, - { value: 'daily', label: 'Daily (at specific time)' }, - { value: 'weekly', label: 'Weekly (at specific time/days)' }, - { value: 'monthly', label: 'Monthly (at specific time/day)' }, -]; - -const customIntervalUnits: { value: CustomIntervalUnit; label: string }[] = [ - { value: 'minute', label: 'minute(s)' }, - { value: 'hour', label: 'hour(s)' }, - { value: 'day', label: 'day(s)' }, -]; - -const daysOfWeekOptions: { value: string; label: string }[] = [ - { value: '1', label: 'Mon' }, - { value: '2', label: 'Tue' }, - { value: '3', label: 'Wed' }, - { value: '4', label: 'Thu' }, - { value: '5', label: 'Fri' }, - { value: '6', label: 'Sat' }, - { value: '0', label: 'Sun' }, -]; - -const modalLabelClassName = 'block text-sm font-medium text-text-prominent mb-1'; -const cronPreviewTextColor = 'text-xs text-text-subtle mt-1'; -const cronPreviewSpecialNoteColor = 'text-xs text-text-warning mt-1'; -const checkboxLabelClassName = 'flex items-center text-sm text-text-default'; -const checkboxInputClassName = - 'h-4 w-4 text-accent-default border-border-subtle rounded focus:ring-accent-default mr-2'; - -type SourceType = 'file' | 'deeplink'; -type ExecutionMode = 'background' | 'foreground'; - -// Function to parse deep link and extract recipe config -async function parseDeepLink(deepLink: string): Promise { - try { - const url = new URL(deepLink); - if (url.protocol !== 'goose:' || (url.hostname !== 'bot' && url.hostname !== 'recipe')) { - return null; - } - - const recipeParam = url.searchParams.get('config'); - if (!recipeParam) { - return null; - } - - return await decodeRecipe(recipeParam); - } catch (error) { - console.error('Failed to parse deep link:', error); - return null; - } -} - -// Function to convert recipe to YAML with schedule configuration -function recipeToYaml(recipe: Recipe, executionMode: ExecutionMode): string { - // Create a clean recipe object for YAML conversion - const cleanRecipe: CleanRecipe = { - title: recipe.title, - description: recipe.description, - }; - - if (recipe.instructions) { - cleanRecipe.instructions = recipe.instructions; - } - - if (recipe.prompt) { - cleanRecipe.prompt = recipe.prompt; - } - - if (recipe.activities && recipe.activities.length > 0) { - cleanRecipe.activities = recipe.activities; - } - - if (recipe.extensions && recipe.extensions.length > 0) { - cleanRecipe.extensions = recipe.extensions.map((ext) => { - const cleanExt: CleanExtension = { - name: ext.name, - type: 'builtin', // Default type, will be overridden below - }; - - // Handle different extension types using type assertions - if ('type' in ext && ext.type) { - cleanExt.type = ext.type as CleanExtension['type']; - - // Use type assertions to access properties safely - const extAny = ext as Record; - - if (ext.type === 'sse' && extAny.uri) { - cleanExt.uri = extAny.uri as string; - } else if (ext.type === 'streamable_http' && extAny.uri) { - cleanExt.uri = extAny.uri as string; - } else if (ext.type === 'stdio') { - if (extAny.cmd) { - cleanExt.cmd = extAny.cmd as string; - } - if (extAny.args) { - cleanExt.args = extAny.args as string[]; - } - } else if (ext.type === 'builtin' && extAny.display_name) { - cleanExt.display_name = extAny.display_name as string; - } - - // Handle frontend type separately to avoid TypeScript narrowing issues - if ((ext.type as string) === 'frontend') { - if (extAny.tools) { - cleanExt.tools = extAny.tools as unknown[]; - } - if (extAny.instructions) { - cleanExt.instructions = extAny.instructions as string; - } - } - } else { - // Fallback: try to 
infer type from available fields - const extAny = ext as Record; - - if (extAny.cmd) { - cleanExt.type = 'stdio'; - cleanExt.cmd = extAny.cmd as string; - if (extAny.args) { - cleanExt.args = extAny.args as string[]; - } - } else if (extAny.command) { - // Handle legacy 'command' field by converting to 'cmd' - cleanExt.type = 'stdio'; - cleanExt.cmd = extAny.command as string; - } else if (extAny.uri) { - // Default to streamable_http for URI-based extensions for forward compatibility - cleanExt.type = 'streamable_http'; - cleanExt.uri = extAny.uri as string; - } else if (extAny.tools) { - cleanExt.type = 'frontend'; - cleanExt.tools = extAny.tools as unknown[]; - if (extAny.instructions) { - cleanExt.instructions = extAny.instructions as string; - } - } else { - // Default to builtin if we can't determine type - cleanExt.type = 'builtin'; - } - } - - // Add common optional fields - if ('env_keys' in ext && ext.env_keys && ext.env_keys.length > 0) { - cleanExt.env_keys = ext.env_keys; - } - - if ('timeout' in ext && ext.timeout) { - cleanExt.timeout = ext.timeout as number; - } - - if ('description' in ext && ext.description) { - cleanExt.description = ext.description as string; - } - - if ('bundled' in ext && ext.bundled !== undefined) { - cleanExt.bundled = ext.bundled as boolean; - } - - return cleanExt; - }); - } - - if (recipe.author) { - cleanRecipe.author = { - contact: recipe.author.contact || undefined, - metadata: recipe.author.metadata || undefined, - }; - } - - // Add schedule configuration based on execution mode - cleanRecipe.schedule = { - foreground: executionMode === 'foreground', - fallback_to_background: true, // Always allow fallback - window_title: executionMode === 'foreground' ? `${recipe.title} - Scheduled` : undefined, - }; - - return yaml.stringify(cleanRecipe); -} - -export const CreateScheduleModal: React.FC = ({ - isOpen, - onClose, - onSubmit, - isLoadingExternally, - apiErrorExternally, - initialDeepLink, -}) => { - const [scheduleId, setScheduleId] = useState(''); - const [sourceType, setSourceType] = useState('file'); - const [executionMode, setExecutionMode] = useState('background'); - const [recipeSourcePath, setRecipeSourcePath] = useState(''); - const [deepLinkInput, setDeepLinkInput] = useState(''); - const [parsedRecipe, setParsedRecipe] = useState(null); - const [frequency, setFrequency] = useState('daily'); - const [customIntervalValue, setCustomIntervalValue] = useState(1); - const [customIntervalUnit, setCustomIntervalUnit] = useState('minute'); - const [selectedDate, setSelectedDate] = useState( - () => new Date().toISOString().split('T')[0] - ); - const [selectedTime, setSelectedTime] = useState('09:00'); - const [selectedMinute, setSelectedMinute] = useState('0'); - const [selectedDaysOfWeek, setSelectedDaysOfWeek] = useState>(new Set(['1'])); - const [selectedDayOfMonth, setSelectedDayOfMonth] = useState('1'); - const [derivedCronExpression, setDerivedCronExpression] = useState(''); - const [readableCronExpression, setReadableCronExpression] = useState(''); - const [internalValidationError, setInternalValidationError] = useState(null); - - const handleDeepLinkChange = useCallback( - async (value: string) => { - setDeepLinkInput(value); - setInternalValidationError(null); - - if (value.trim()) { - try { - const recipe = await parseDeepLink(value.trim()); - if (recipe) { - setParsedRecipe(recipe); - // Auto-populate schedule ID from recipe title if available - if (recipe.title && !scheduleId) { - const cleanId = recipe.title - .toLowerCase() - 
.replace(/[^a-z0-9-]/g, '-') - .replace(/-+/g, '-'); - setScheduleId(cleanId); - } - } else { - setParsedRecipe(null); - setInternalValidationError( - 'Invalid deep link format. Please use a goose://bot or goose://recipe link.' - ); - } - } catch { - setParsedRecipe(null); - setInternalValidationError( - 'Failed to parse deep link. Please ensure using a goose://bot or goose://recipe link and try again.' - ); - } - } else { - setParsedRecipe(null); - } - }, - [scheduleId] - ); - - useEffect(() => { - // Check for initial deep link from props when modal opens - if (isOpen && initialDeepLink) { - setSourceType('deeplink'); - handleDeepLinkChange(initialDeepLink); - } - }, [isOpen, initialDeepLink, handleDeepLinkChange]); - - const resetForm = () => { - setScheduleId(''); - setSourceType('file'); - setExecutionMode('background'); - setRecipeSourcePath(''); - setDeepLinkInput(''); - setParsedRecipe(null); - setFrequency('daily'); - setCustomIntervalValue(1); - setCustomIntervalUnit('minute'); - setSelectedDate(new Date().toISOString().split('T')[0]); - setSelectedTime('09:00'); - setSelectedMinute('0'); - setSelectedDaysOfWeek(new Set(['1'])); - setSelectedDayOfMonth('1'); - setInternalValidationError(null); - setReadableCronExpression(''); - }; - - const handleBrowseFile = async () => { - // Default to global recipes directory, but fallback to local if needed - const defaultPath = getStorageDirectory(true); - const filePath = await window.electron.selectFileOrDirectory(defaultPath); - if (filePath) { - if (filePath.endsWith('.yaml') || filePath.endsWith('.yml')) { - setRecipeSourcePath(filePath); - setInternalValidationError(null); - } else { - setInternalValidationError('Invalid file type: Please select a YAML file (.yaml or .yml)'); - console.warn('Invalid file type: Please select a YAML file (.yaml or .yml)'); - } - } - }; - - useEffect(() => { - const generateCronExpression = (): string => { - const timeParts = selectedTime.split(':'); - const minutePart = timeParts.length > 1 ? String(parseInt(timeParts[1], 10)) : '0'; - const hourPart = timeParts.length > 0 ? 
String(parseInt(timeParts[0], 10)) : '0'; - if (isNaN(parseInt(minutePart)) || isNaN(parseInt(hourPart))) { - return 'Invalid time format.'; - } - - // Temporal uses 5-field cron: minute hour day month dayofweek (no seconds) - switch (frequency) { - case 'once': - if (selectedDate && selectedTime) { - try { - const dateObj = new Date(`${selectedDate}T${selectedTime}`); - if (isNaN(dateObj.getTime())) return "Invalid date/time for 'once'."; - return `${dateObj.getMinutes()} ${dateObj.getHours()} ${dateObj.getDate()} ${ - dateObj.getMonth() + 1 - } *`; - } catch { - return "Error parsing date/time for 'once'."; - } - } - return 'Date and Time are required for "Once" frequency.'; - case 'every': { - if (customIntervalValue <= 0) { - return 'Custom interval value must be greater than 0.'; - } - switch (customIntervalUnit) { - case 'minute': - return `*/${customIntervalValue} * * * *`; - case 'hour': - return `0 */${customIntervalValue} * * *`; - case 'day': - return `0 0 */${customIntervalValue} * *`; - default: - return 'Invalid custom interval unit.'; - } - } - case 'daily': - return `${minutePart} ${hourPart} * * *`; - case 'weekly': { - if (selectedDaysOfWeek.size === 0) { - return 'Select at least one day for weekly frequency.'; - } - const days = Array.from(selectedDaysOfWeek) - .sort((a, b) => parseInt(a) - parseInt(b)) - .join(','); - return `${minutePart} ${hourPart} * * ${days}`; - } - case 'monthly': { - const sDayOfMonth = parseInt(selectedDayOfMonth, 10); - if (isNaN(sDayOfMonth) || sDayOfMonth < 1 || sDayOfMonth > 31) { - return 'Invalid day of month (1-31) for monthly frequency.'; - } - return `${minutePart} ${hourPart} ${sDayOfMonth} * *`; - } - default: - return 'Invalid frequency selected.'; - } - }; - const cron = generateCronExpression(); - setDerivedCronExpression(cron); - try { - if ( - cron.includes('Invalid') || - cron.includes('required') || - cron.includes('Error') || - cron.includes('Select at least one') - ) { - setReadableCronExpression('Invalid cron details provided.'); - } else { - setReadableCronExpression(cronstrue.toString(cron)); - } - } catch { - setReadableCronExpression('Could not parse cron string.'); - } - }, [ - frequency, - customIntervalValue, - customIntervalUnit, - selectedDate, - selectedTime, - selectedMinute, - selectedDaysOfWeek, - selectedDayOfMonth, - ]); - - const handleDayOfWeekChange = (dayValue: string) => { - setSelectedDaysOfWeek((prev) => { - const newSet = new Set(prev); - if (newSet.has(dayValue)) { - newSet.delete(dayValue); - } else { - newSet.add(dayValue); - } - return newSet; - }); - }; - - const handleLocalSubmit = async (event: FormEvent) => { - event.preventDefault(); - setInternalValidationError(null); - - if (!scheduleId.trim()) { - setInternalValidationError('Schedule ID is required.'); - return; - } - - let finalRecipeSource = ''; - - if (sourceType === 'file') { - if (!recipeSourcePath) { - setInternalValidationError('Recipe source file is required.'); - return; - } - finalRecipeSource = recipeSourcePath; - } else if (sourceType === 'deeplink') { - if (!deepLinkInput.trim()) { - setInternalValidationError('Deep link is required.'); - return; - } - if (!parsedRecipe) { - setInternalValidationError('Invalid deep link. 
Please check the format.'); - return; - } - - try { - // Convert recipe to YAML and save to a temporary file - const yamlContent = recipeToYaml(parsedRecipe, executionMode); - console.log('Generated YAML content:', yamlContent); // Debug log - const tempFileName = `schedule-${scheduleId}-${Date.now()}.yaml`; - const tempDir = window.electron.getConfig().GOOSE_WORKING_DIR || '.'; - const tempFilePath = `${tempDir}/${tempFileName}`; - - // Write the YAML file - const writeSuccess = await window.electron.writeFile(tempFilePath, yamlContent); - if (!writeSuccess) { - setInternalValidationError('Failed to create temporary recipe file.'); - return; - } - - finalRecipeSource = tempFilePath; - } catch (error) { - console.error('Failed to convert recipe to YAML:', error); - setInternalValidationError('Failed to process the recipe from deep link.'); - return; - } - } - - if ( - !derivedCronExpression || - derivedCronExpression.includes('Invalid') || - derivedCronExpression.includes('required') || - derivedCronExpression.includes('Error') || - derivedCronExpression.includes('Select at least one') - ) { - setInternalValidationError(`Invalid cron expression: ${derivedCronExpression}`); - return; - } - if (frequency === 'weekly' && selectedDaysOfWeek.size === 0) { - setInternalValidationError('For weekly frequency, select at least one day.'); - return; - } - - const newSchedulePayload: NewSchedulePayload = { - id: scheduleId.trim(), - recipe_source: finalRecipeSource, - cron: derivedCronExpression, - execution_mode: executionMode, - }; - - await onSubmit(newSchedulePayload); - }; - - const handleClose = () => { - resetForm(); - onClose(); - }; - - if (!isOpen) return null; - - return ( -
- -
-
- Clock -

Create New Schedule

-

- Create a new schedule using the settings below to do things like automatically run - tasks or create files -

-
-
- -
- {apiErrorExternally && ( -

- {apiErrorExternally} -

- )} - {internalValidationError && ( -

- {internalValidationError} -

- )} - -
- - setScheduleId(e.target.value)} - placeholder="e.g., daily-summary-job" - required - /> -
- -
- -
-
- - -
- - {sourceType === 'file' && ( -
- - {recipeSourcePath && ( -

- Selected: {recipeSourcePath} -

- )} - {executionMode === 'foreground' && ( -
-

- Note: For foreground execution with YAML files, add this to - your recipe: -

-
-                        {`schedule:
-  foreground: true
-  fallback_to_background: true`}
-                      
-
- )} -
- )} - - {sourceType === 'deeplink' && ( -
- handleDeepLinkChange(e.target.value)} - placeholder="Paste goose://bot or goose://recipe link here..." - className="rounded-full" - /> - {parsedRecipe && ( -
-

- ✓ Recipe parsed successfully -

-

- Title: {parsedRecipe.title} -

-

- Description: {parsedRecipe.description} -

-
- )} -
- )} -
-
- -
- -
-
- - -
- -
- {executionMode === 'background' ? ( -

- Background: Runs silently in the background without opening a - window. Results are saved to session storage. -

- ) : ( -

- Foreground: Opens in a desktop window when the Goose app is - running. Falls back to background if the app is not available. -

- )} -
-
-
- -
- - setCustomIntervalValue(parseInt(e.target.value) || 1)} - required - /> -
-
- - setSelectedDate(e.target.value)} - required - /> -
-
- - setSelectedTime(e.target.value)} - required - /> -
- - )} - {(frequency === 'daily' || frequency === 'weekly' || frequency === 'monthly') && ( -
- - setSelectedTime(e.target.value)} - required - /> -
- )} - {frequency === 'weekly' && ( -
- -
- {daysOfWeekOptions.map((day) => ( - - ))} -
-
- )} - {frequency === 'monthly' && ( -
- - setSelectedDayOfMonth(e.target.value)} - required - /> -
- )} -
-

- Generated Cron:{' '} - - {derivedCronExpression} - -

-

- Human Readable: {readableCronExpression} -

-

- Syntax: M H D M DoW (M=minute, H=hour, D=day, M=month, DoW=day of week: 0/7=Sun) -

- {frequency === 'once' && ( -

- Note: "Once" schedules recur annually. True one-time tasks may need backend deletion - after execution. -

- )} -
-
- - {/* Actions */} -
- - -
-
-
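Stepping back from the removed markup above: with this modal gone, execution_mode also disappears from the create-schedule payload (see the regenerated openapi.json and types.gen.ts earlier in this diff), leaving just id, recipe_source and cron. A quick sketch of the resulting body shape; the values are placeholders, only the field names come from the generated types:

```rust
use serde_json::json;

fn main() {
    // Create-schedule body after this change: no execution_mode field, and the
    // cron string is the six-field (seconds-first) form produced by the new picker.
    let body = json!({
        "id": "daily-summary-job",
        "recipe_source": "/path/to/recipe.yaml",
        "cron": "0 30 9 * * *",
    });
    println!("{}", serde_json::to_string_pretty(&body).unwrap());
}
```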
- ); -}; diff --git a/ui/desktop/src/components/schedule/CronPicker.tsx b/ui/desktop/src/components/schedule/CronPicker.tsx new file mode 100644 index 000000000000..213081043ff8 --- /dev/null +++ b/ui/desktop/src/components/schedule/CronPicker.tsx @@ -0,0 +1,283 @@ +import React, { useState, useEffect } from 'react'; +import cronstrue from 'cronstrue'; +import { ScheduledJob } from '../../schedule'; + +type Period = 'minute' | 'hour' | 'day' | 'week' | 'month' | 'year'; + +type ParsedCron = { + period: Period; + second: string; + minute: string; + hour: string; + dayOfMonth: string; + month: string; + dayOfWeek: string; +}; + +interface CronPickerProps { + schedule: ScheduledJob | null; + onChange: (cron: string) => void; +} + +const parseCron = (cron: string): ParsedCron => { + const parts = cron.split(' '); + if (parts.length === 5) { + parts.unshift('0'); + } + if (parts.length !== 6) { + return { + period: 'day', + second: '0', + minute: '0', + hour: '14', + dayOfMonth: '*', + month: '*', + dayOfWeek: '*', + }; + } + + const [second, minute, hour, dayOfMonth, month, dayOfWeek] = parts; + + if (month !== '*' && dayOfMonth !== '*') { + return { period: 'year', second, minute, hour, dayOfMonth, month, dayOfWeek }; + } + if (dayOfMonth !== '*') { + return { period: 'month', second, minute, hour, dayOfMonth, month, dayOfWeek }; + } + if (dayOfWeek !== '*') { + return { period: 'week', second, minute, hour, dayOfMonth, month, dayOfWeek }; + } + if (hour !== '*') { + return { period: 'day', second, minute, hour, dayOfMonth, month, dayOfWeek }; + } + if (minute !== '*') { + return { period: 'hour', second, minute, hour, dayOfMonth, month, dayOfWeek }; + } + return { period: 'minute', second, minute, hour, dayOfMonth, month, dayOfWeek }; +}; + +const to24Hour = (hour12: number, isPM: boolean): number => { + if (hour12 === 12) { + return isPM ? 12 : 0; + } + return isPM ? hour12 + 12 : hour12; +}; + +const to12Hour = (hour24: number): { hour: number; isPM: boolean } => { + if (hour24 === 0) { + return { hour: 12, isPM: false }; + } + if (hour24 === 12) { + return { hour: 12, isPM: true }; + } + if (hour24 > 12) { + return { hour: hour24 - 12, isPM: true }; + } + return { hour: hour24, isPM: false }; +}; + +export const CronPicker: React.FC = ({ schedule, onChange }) => { + const [period, setPeriod] = useState('day'); + const [second, setSecond] = useState('0'); + const [minute, setMinute] = useState('0'); + const [hour12, setHour12] = useState(2); + const [currentCron, setCurrentCron] = useState(''); + const [isPM, setIsPM] = useState(true); + const [dayOfWeek, setDayOfWeek] = useState('1'); + const [dayOfMonth, setDayOfMonth] = useState('1'); + const [month, setMonth] = useState('1'); + + useEffect(() => { + const parsed = parseCron(schedule?.cron || ''); + setPeriod(parsed.period); + setSecond(parsed.second === '*' ? '0' : parsed.second); + setMinute(parsed.minute === '*' ? '0' : parsed.minute); + const hour24 = parsed.hour === '*' ? 14 : parseInt(parsed.hour, 10); + const { hour, isPM: pm } = to12Hour(hour24); + setHour12(hour); + setIsPM(pm); + setDayOfWeek(parsed.dayOfWeek === '*' ? '1' : parsed.dayOfWeek); + setDayOfMonth(parsed.dayOfMonth === '*' ? '1' : parsed.dayOfMonth); + setMonth(parsed.month === '*' ? 
'1' : parsed.month); + }, [schedule]); + + useEffect(() => { + const hour24 = to24Hour(hour12, isPM); + let cron: string; + + switch (period) { + case 'minute': + cron = `${second} * * * * *`; + break; + case 'hour': + cron = `${second} ${minute} * * * *`; + break; + case 'day': + cron = `${second} ${minute} ${hour24} * * *`; + break; + case 'week': + cron = `${second} ${minute} ${hour24} * * ${dayOfWeek}`; + break; + case 'month': + cron = `${second} ${minute} ${hour24} ${dayOfMonth} * *`; + break; + case 'year': + cron = `${second} ${minute} ${hour24} ${dayOfMonth} ${month} *`; + break; + default: + cron = '0 0 0 * * *'; + } + onChange(cron); + setCurrentCron(cron); + }, [period, second, minute, hour12, isPM, dayOfWeek, dayOfMonth, month, onChange]); + + const getReadable = () => { + if (!currentCron) { + return ''; + } + const cronWithoutSeconds = currentCron.split(' ').slice(1).join(' '); + return cronstrue.toString(cronWithoutSeconds); + }; + + const selectClassName = 'px-2 py-1 border rounded bg-white dark:bg-gray-800 dark:border-gray-600'; + + return ( +
+
+ Every + +
+ +
+ {period === 'year' && ( +
+ in + +
+ )} + + {(period === 'month' || period === 'year') && ( +
+ on day + setDayOfMonth(e.target.value)} + className="w-16 px-2 py-1 border rounded" + /> +
+ )} + + {period === 'week' && ( +
+ on + +
+ )} + + {(period === 'day' || period === 'week' || period === 'month' || period === 'year') && ( +
+ at + setHour12(parseInt(e.target.value) || 1)} + className="w-16 px-2 py-1 border rounded" + /> + : + setMinute(e.target.value.padStart(2, '0'))} + className="w-16 px-2 py-1 border rounded" + /> + +
+ )} + + {period === 'hour' && ( +
+ at minute + setMinute(e.target.value)} + className="w-16 px-2 py-1 border rounded" + /> +
+ )} + + {period === 'minute' && ( +
+ at second + setSecond(e.target.value)} + className="w-16 px-2 py-1 border rounded" + /> +
+ )} +
+ +
{getReadable()}
+
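One thing worth noting about CronPicker's output: it always emits a six-field expression with seconds first, whereas the deleted modals produced the five-field form, and getReadable() strips the seconds again before handing the string to cronstrue. Anything consuming these values server-side therefore needs a seconds-aware parser. A hedged sketch using the cron crate — whether the backend actually uses that crate is an assumption here; the point is only that the seconds-first format parses cleanly:

```rust
use std::str::FromStr;

use chrono::Utc;
use cron::Schedule;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Six-field expression as emitted by CronPicker: sec min hour day month weekday.
    let schedule = Schedule::from_str("0 30 9 * * *")?; // every day at 09:30:00
    for next in schedule.upcoming(Utc).take(3) {
        println!("next run: {next}");
    }
    Ok(())
}
```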
+ ); +}; diff --git a/ui/desktop/src/components/schedule/EditScheduleModal.tsx b/ui/desktop/src/components/schedule/EditScheduleModal.tsx deleted file mode 100644 index c6ca2e558082..000000000000 --- a/ui/desktop/src/components/schedule/EditScheduleModal.tsx +++ /dev/null @@ -1,551 +0,0 @@ -import React, { useState, useEffect, FormEvent } from 'react'; -import { Card } from '../ui/card'; -import { Button } from '../ui/button'; -import { Input } from '../ui/input'; -import { Select } from '../ui/Select'; -import { ScheduledJob } from '../../schedule'; -import cronstrue from 'cronstrue'; - -type FrequencyValue = 'once' | 'every' | 'daily' | 'weekly' | 'monthly'; - -type CustomIntervalUnit = 'minute' | 'hour' | 'day'; - -interface FrequencyOption { - value: FrequencyValue; - label: string; -} - -interface EditScheduleModalProps { - isOpen: boolean; - onClose: () => void; - onSubmit: (cron: string) => Promise; - schedule: ScheduledJob | null; - isLoadingExternally?: boolean; - apiErrorExternally?: string | null; -} - -const frequencies: FrequencyOption[] = [ - { value: 'once', label: 'Once' }, - { value: 'every', label: 'Every...' }, - { value: 'daily', label: 'Daily (at specific time)' }, - { value: 'weekly', label: 'Weekly (at specific time/days)' }, - { value: 'monthly', label: 'Monthly (at specific time/day)' }, -]; - -const customIntervalUnits: { value: CustomIntervalUnit; label: string }[] = [ - { value: 'minute', label: 'minute(s)' }, - { value: 'hour', label: 'hour(s)' }, - { value: 'day', label: 'day(s)' }, -]; - -const daysOfWeekOptions: { value: string; label: string }[] = [ - { value: '1', label: 'Mon' }, - { value: '2', label: 'Tue' }, - { value: '3', label: 'Wed' }, - { value: '4', label: 'Thu' }, - { value: '5', label: 'Fri' }, - { value: '6', label: 'Sat' }, - { value: '0', label: 'Sun' }, -]; - -const modalLabelClassName = 'block text-sm font-medium text-gray-700 dark:text-gray-300 mb-1'; -const cronPreviewTextColor = 'text-xs text-gray-500 dark:text-gray-400 mt-1'; -const cronPreviewSpecialNoteColor = 'text-xs text-yellow-600 dark:text-yellow-500 mt-1'; -const checkboxLabelClassName = 'flex items-center text-sm text-textStandard dark:text-gray-300'; -const checkboxInputClassName = - 'h-4 w-4 text-indigo-600 border-gray-300 dark:border-gray-600 rounded focus:ring-indigo-500 mr-2'; - -// Helper function to parse cron expression and determine frequency -const parseCronExpression = (cron: string) => { - const parts = cron.split(' '); - if (parts.length !== 5 && parts.length !== 6) return null; - - // Handle both 5-field and 6-field cron expressions - const [minutes, hours, dayOfMonth, month, dayOfWeek] = - parts.length === 5 ? 
parts : parts.slice(1); // Skip seconds if present - - // Check for custom intervals (every X minutes/hours/days) - if ( - minutes.startsWith('*/') && - hours === '*' && - dayOfMonth === '*' && - month === '*' && - dayOfWeek === '*' - ) { - const intervalValue = parseInt(minutes.substring(2)); - return { - frequency: 'every' as FrequencyValue, - customIntervalValue: intervalValue, - customIntervalUnit: 'minute' as CustomIntervalUnit, - }; - } - if ( - minutes === '0' && - hours.startsWith('*/') && - dayOfMonth === '*' && - month === '*' && - dayOfWeek === '*' - ) { - const intervalValue = parseInt(hours.substring(2)); - return { - frequency: 'every' as FrequencyValue, - customIntervalValue: intervalValue, - customIntervalUnit: 'hour' as CustomIntervalUnit, - }; - } - if ( - minutes === '0' && - hours === '0' && - dayOfMonth.startsWith('*/') && - month === '*' && - dayOfWeek === '*' - ) { - const intervalValue = parseInt(dayOfMonth.substring(2)); - return { - frequency: 'every' as FrequencyValue, - customIntervalValue: intervalValue, - customIntervalUnit: 'day' as CustomIntervalUnit, - }; - } - - // Check for specific patterns - if (dayOfMonth !== '*' && month !== '*' && dayOfWeek === '*') { - return { frequency: 'once' as FrequencyValue, minutes, hours, dayOfMonth, month }; - } - if ( - minutes !== '*' && - hours !== '*' && - dayOfMonth === '*' && - month === '*' && - dayOfWeek === '*' - ) { - return { frequency: 'daily' as FrequencyValue, minutes, hours }; - } - if ( - minutes !== '*' && - hours !== '*' && - dayOfMonth === '*' && - month === '*' && - dayOfWeek !== '*' - ) { - return { frequency: 'weekly' as FrequencyValue, minutes, hours, dayOfWeek }; - } - if ( - minutes !== '*' && - hours !== '*' && - dayOfMonth !== '*' && - month === '*' && - dayOfWeek === '*' - ) { - return { frequency: 'monthly' as FrequencyValue, minutes, hours, dayOfMonth }; - } - - return null; -}; - -export const EditScheduleModal: React.FC = ({ - isOpen, - onClose, - onSubmit, - schedule, - isLoadingExternally = false, - apiErrorExternally = null, -}) => { - const [frequency, setFrequency] = useState('daily'); - const [customIntervalValue, setCustomIntervalValue] = useState(1); - const [customIntervalUnit, setCustomIntervalUnit] = useState('minute'); - const [selectedDate, setSelectedDate] = useState( - () => new Date().toISOString().split('T')[0] - ); - const [selectedTime, setSelectedTime] = useState('09:00'); - const [selectedMinute] = useState('0'); - const [selectedDaysOfWeek, setSelectedDaysOfWeek] = useState>(new Set(['1'])); - const [selectedDayOfMonth, setSelectedDayOfMonth] = useState('1'); - const [derivedCronExpression, setDerivedCronExpression] = useState(''); - const [readableCronExpression, setReadableCronExpression] = useState(''); - const [internalValidationError, setInternalValidationError] = useState(null); - - // Initialize form from existing schedule - useEffect(() => { - if (schedule && isOpen) { - const parsed = parseCronExpression(schedule.cron); - - if (parsed) { - setFrequency(parsed.frequency); - - switch (parsed.frequency) { - case 'once': - // For 'once', we'd need to reconstruct the date from cron parts - // This is complex, so we'll default to current date/time for now - setSelectedDate(new Date().toISOString().split('T')[0]); - setSelectedTime( - `${parsed.hours?.padStart(2, '0')}:${parsed.minutes?.padStart(2, '0')}` - ); - break; - case 'every': - if (parsed.customIntervalValue) { - setCustomIntervalValue(parsed.customIntervalValue); - } - if (parsed.customIntervalUnit) { - 
setCustomIntervalUnit(parsed.customIntervalUnit); - } - break; - case 'daily': - setSelectedTime( - `${parsed.hours?.padStart(2, '0')}:${parsed.minutes?.padStart(2, '0')}` - ); - break; - case 'weekly': - setSelectedTime( - `${parsed.hours?.padStart(2, '0')}:${parsed.minutes?.padStart(2, '0')}` - ); - if (parsed.dayOfWeek) { - const days = parsed.dayOfWeek.split(',').map((d) => d.trim()); - setSelectedDaysOfWeek(new Set(days)); - } - break; - case 'monthly': - setSelectedTime( - `${parsed.hours?.padStart(2, '0')}:${parsed.minutes?.padStart(2, '0')}` - ); - setSelectedDayOfMonth(parsed.dayOfMonth || '1'); - break; - } - } else { - // If we can't parse the cron, default to daily at 9 AM - setFrequency('daily'); - setSelectedTime('09:00'); - } - - setInternalValidationError(null); - } - }, [schedule, isOpen]); - - useEffect(() => { - const generateCronExpression = (): string => { - const timeParts = selectedTime.split(':'); - const minutePart = timeParts.length > 1 ? String(parseInt(timeParts[1], 10)) : '0'; - const hourPart = timeParts.length > 0 ? String(parseInt(timeParts[0], 10)) : '0'; - if (isNaN(parseInt(minutePart)) || isNaN(parseInt(hourPart))) { - return 'Invalid time format.'; - } - switch (frequency) { - case 'once': - if (selectedDate && selectedTime) { - try { - const dateObj = new Date(`${selectedDate}T${selectedTime}`); - if (isNaN(dateObj.getTime())) return "Invalid date/time for 'once'."; - return `${dateObj.getMinutes()} ${dateObj.getHours()} ${dateObj.getDate()} ${ - dateObj.getMonth() + 1 - } *`; - } catch { - return "Error parsing date/time for 'once'."; - } - } - return 'Date and Time are required for "Once" frequency.'; - case 'every': { - if (customIntervalValue <= 0) { - return 'Custom interval value must be greater than 0.'; - } - switch (customIntervalUnit) { - case 'minute': - return `*/${customIntervalValue} * * * *`; - case 'hour': - return `0 */${customIntervalValue} * * *`; - case 'day': - return `0 0 */${customIntervalValue} * *`; - default: - return 'Invalid custom interval unit.'; - } - } - case 'daily': - return `${minutePart} ${hourPart} * * *`; - case 'weekly': { - if (selectedDaysOfWeek.size === 0) { - return 'Select at least one day for weekly frequency.'; - } - const days = Array.from(selectedDaysOfWeek) - .sort((a, b) => parseInt(a) - parseInt(b)) - .join(','); - return `${minutePart} ${hourPart} * * ${days}`; - } - case 'monthly': { - const sDayOfMonth = parseInt(selectedDayOfMonth, 10); - if (isNaN(sDayOfMonth) || sDayOfMonth < 1 || sDayOfMonth > 31) { - return 'Invalid day of month (1-31) for monthly frequency.'; - } - return `${minutePart} ${hourPart} ${sDayOfMonth} * *`; - } - default: - return 'Invalid frequency selected.'; - } - }; - const cron = generateCronExpression(); - setDerivedCronExpression(cron); - try { - if ( - cron.includes('Invalid') || - cron.includes('required') || - cron.includes('Error') || - cron.includes('Select at least one') - ) { - setReadableCronExpression('Invalid cron details provided.'); - } else { - setReadableCronExpression(cronstrue.toString(cron)); - } - } catch { - setReadableCronExpression('Could not parse cron string.'); - } - }, [ - frequency, - customIntervalValue, - customIntervalUnit, - selectedDate, - selectedTime, - selectedMinute, - selectedDaysOfWeek, - selectedDayOfMonth, - ]); - - const handleDayOfWeekChange = (dayValue: string) => { - setSelectedDaysOfWeek((prev) => { - const newSet = new Set(prev); - if (newSet.has(dayValue)) { - newSet.delete(dayValue); - } else { - newSet.add(dayValue); - } - 
return newSet; - }); - }; - - const handleLocalSubmit = async (event: FormEvent) => { - event.preventDefault(); - setInternalValidationError(null); - - if ( - !derivedCronExpression || - derivedCronExpression.includes('Invalid') || - derivedCronExpression.includes('required') || - derivedCronExpression.includes('Error') || - derivedCronExpression.includes('Select at least one') - ) { - setInternalValidationError(`Invalid cron expression: ${derivedCronExpression}`); - return; - } - if (frequency === 'weekly' && selectedDaysOfWeek.size === 0) { - setInternalValidationError('For weekly frequency, select at least one day.'); - return; - } - - await onSubmit(derivedCronExpression); - }; - - const handleClose = () => { - onClose(); - }; - - if (!isOpen) return null; - - return ( -
- -
-

- Edit Schedule: {schedule?.id || ''} -

-
- -
- {apiErrorExternally && ( -

- {apiErrorExternally} -

- )} - {internalValidationError && ( -

- {internalValidationError} -

- )} - -
- - setCustomIntervalValue(parseInt(e.target.value) || 1)} - required - /> -
-
- - setSelectedDate(e.target.value)} - required - /> -
-
- - setSelectedTime(e.target.value)} - required - /> -
- - )} - {(frequency === 'daily' || frequency === 'weekly' || frequency === 'monthly') && ( -
- - setSelectedTime(e.target.value)} - required - /> -
- )} - {frequency === 'weekly' && ( -
- -
- {daysOfWeekOptions.map((day) => ( - - ))} -
-
- )} - {frequency === 'monthly' && ( -
- - setSelectedDayOfMonth(e.target.value)} - required - /> -
- )} -
-

- Generated Cron:{' '} - - {derivedCronExpression} - -

-

- Human Readable: {readableCronExpression} -

-

- Syntax: M H D M DoW (M=minute, H=hour, D=day, M=month, DoW=day of week: 0/7=Sun) -

- {frequency === 'once' && ( -

- Note: "Once" schedules recur annually. True one-time tasks may need backend deletion - after execution. -

- )} -
-
- - {/* Actions */} -
- - -
-
-
- ); -}; diff --git a/ui/desktop/src/components/schedule/ScheduleDetailView.tsx b/ui/desktop/src/components/schedule/ScheduleDetailView.tsx index 7697c033babd..cf61fb4d5cd3 100644 --- a/ui/desktop/src/components/schedule/ScheduleDetailView.tsx +++ b/ui/desktop/src/components/schedule/ScheduleDetailView.tsx @@ -1,4 +1,4 @@ -import React, { useState, useEffect, useCallback, useMemo } from 'react'; +import React, { useState, useEffect } from 'react'; import { Button } from '../ui/button'; import { ScrollArea } from '../ui/scroll-area'; import BackButton from '../ui/BackButton'; @@ -15,7 +15,7 @@ import { ScheduledJob, } from '../../schedule'; import SessionHistoryView from '../sessions/SessionHistoryView'; -import { EditScheduleModal } from './EditScheduleModal'; +import { ScheduleModal, NewSchedulePayload } from './ScheduleModal'; import { toastError, toastSuccess } from '../../toasts'; import { Loader2, Pause, Play, Edit, Square, Eye } from 'lucide-react'; import cronstrue from 'cronstrue'; @@ -42,292 +42,125 @@ interface ScheduleDetailViewProps { onNavigateBack: () => void; } -// Memoized ScheduleInfoCard component to prevent unnecessary re-renders of static content -const ScheduleInfoCard = React.memo<{ - scheduleDetails: ScheduledJob; -}>(({ scheduleDetails }) => { - const readableCron = useMemo(() => { - try { - return cronstrue.toString(scheduleDetails.cron); - } catch (e) { - console.warn(`Could not parse cron string "${scheduleDetails.cron}":`, e); - return scheduleDetails.cron; - } - }, [scheduleDetails.cron]); - - const formattedLastRun = useMemo(() => { - return formatToLocalDateWithTimezone(scheduleDetails.last_run); - }, [scheduleDetails.last_run]); - - const formattedProcessStartTime = useMemo(() => { - return scheduleDetails.process_start_time - ? formatToLocalDateWithTimezone(scheduleDetails.process_start_time) - : null; - }, [scheduleDetails.process_start_time]); - - return ( - -
-
-

{scheduleDetails.id}

-
- {scheduleDetails.currently_running && ( -
- - Currently Running -
- )} - {scheduleDetails.paused && ( -
- - Paused -
- )} -
-
-

- Schedule: {readableCron} -

-

- Cron Expression: {scheduleDetails.cron} -

-

- Recipe Source: {scheduleDetails.source} -

-

- Last Run: {formattedLastRun} -

- {scheduleDetails.execution_mode && ( -

- Execution Mode:{' '} - - {scheduleDetails.execution_mode === 'foreground' ? '🖥️ Foreground' : '⚡ Background'} - -

- )} - {scheduleDetails.currently_running && scheduleDetails.current_session_id && ( -

- Current Session:{' '} - {scheduleDetails.current_session_id} -

- )} - {scheduleDetails.currently_running && formattedProcessStartTime && ( -

- Process Started: {formattedProcessStartTime} -

- )} -
-
- ); -}); - -ScheduleInfoCard.displayName = 'ScheduleInfoCard'; - const ScheduleDetailView: React.FC = ({ scheduleId, onNavigateBack }) => { const [sessions, setSessions] = useState([]); const [isLoadingSessions, setIsLoadingSessions] = useState(false); const [sessionsError, setSessionsError] = useState(null); - const [runNowLoading, setRunNowLoading] = useState(false); + const [scheduleDetails, setScheduleDetails] = useState(null); const [isLoadingSchedule, setIsLoadingSchedule] = useState(false); const [scheduleError, setScheduleError] = useState(null); - // Individual loading states for each action to prevent double-clicks - const [pauseUnpauseLoading, setPauseUnpauseLoading] = useState(false); - const [killJobLoading, setKillJobLoading] = useState(false); - const [inspectJobLoading, setInspectJobLoading] = useState(false); + const [isActionLoading, setIsActionLoading] = useState(false); - // Track if we explicitly killed a job to distinguish from natural completion - const [jobWasKilled, setJobWasKilled] = useState(false); + const [selectedSession, setSelectedSession] = useState(null); + const [isLoadingSession, setIsLoadingSession] = useState(false); + const [sessionError, setSessionError] = useState(null); - const [selectedSessionDetails, setSelectedSessionDetails] = useState(null); - const [isLoadingSessionDetails, setIsLoadingSessionDetails] = useState(false); - const [sessionDetailsError, setSessionDetailsError] = useState(null); - const [isEditModalOpen, setIsEditModalOpen] = useState(false); - const [editApiError, setEditApiError] = useState(null); - const [isEditSubmitting, setIsEditSubmitting] = useState(false); + const [isModalOpen, setIsModalOpen] = useState(false); - const fetchScheduleSessions = useCallback(async (sId: string) => { - if (!sId) return; + const fetchSessions = async (sId: string) => { setIsLoadingSessions(true); setSessionsError(null); try { - const fetchedSessions = await getScheduleSessions(sId, 20); - setSessions((prevSessions) => { - // Only update if sessions actually changed to prevent unnecessary re-renders - if (JSON.stringify(prevSessions) !== JSON.stringify(fetchedSessions)) { - return fetchedSessions as ScheduleSessionMeta[]; - } - return prevSessions; - }); + const data = await getScheduleSessions(sId, 20); + setSessions(data); } catch (err) { - console.error('Failed to fetch schedule sessions:', err); - setSessionsError(err instanceof Error ? err.message : 'Failed to fetch schedule sessions'); + setSessionsError(err instanceof Error ? err.message : 'Failed to fetch sessions'); } finally { setIsLoadingSessions(false); } - }, []); + }; - const fetchScheduleDetails = useCallback( - async (sId: string, isRefresh = false) => { - if (!sId) return; - if (!isRefresh) setIsLoadingSchedule(true); - setScheduleError(null); - try { - const allSchedules = await listSchedules(); - const schedule = allSchedules.find((s) => s.id === sId); - if (schedule) { - setScheduleDetails((prevDetails) => { - // Only update if schedule details actually changed - if (!prevDetails || JSON.stringify(prevDetails) !== JSON.stringify(schedule)) { - // Only reset runNowLoading if we explicitly killed the job - if (!schedule.currently_running && runNowLoading && jobWasKilled) { - setRunNowLoading(false); - setJobWasKilled(false); - } - return schedule; - } - return prevDetails; - }); - } else { - setScheduleError('Schedule not found'); - } - } catch (err) { - console.error('Failed to fetch schedule details:', err); - setScheduleError(err instanceof Error ? 
err.message : 'Failed to fetch schedule details'); - } finally { - if (!isRefresh) setIsLoadingSchedule(false); + const fetchSchedule = async (sId: string) => { + setIsLoadingSchedule(true); + setScheduleError(null); + try { + const allSchedules = await listSchedules(); + const schedule = allSchedules.find((s) => s.id === sId); + if (schedule) { + setScheduleDetails(schedule); + } else { + setScheduleError('Schedule not found'); } - }, - [runNowLoading, jobWasKilled] - ); + } catch (err) { + setScheduleError(err instanceof Error ? err.message : 'Failed to fetch schedule'); + } finally { + setIsLoadingSchedule(false); + } + }; useEffect(() => { - if (scheduleId && !selectedSessionDetails) { - fetchScheduleSessions(scheduleId); - fetchScheduleDetails(scheduleId); - } else if (!scheduleId) { - setSessions([]); - setSessionsError(null); - setRunNowLoading(false); - setSelectedSessionDetails(null); - setScheduleDetails(null); - setScheduleError(null); - setJobWasKilled(false); // Reset kill flag when changing schedules + if (scheduleId && !selectedSession) { + fetchSessions(scheduleId); + fetchSchedule(scheduleId); } - }, [scheduleId, fetchScheduleSessions, fetchScheduleDetails, selectedSessionDetails]); + }, [scheduleId, selectedSession]); const handleRunNow = async () => { if (!scheduleId) return; - setRunNowLoading(true); + setIsActionLoading(true); try { const newSessionId = await runScheduleNow(scheduleId); if (newSessionId === 'CANCELLED') { - toastSuccess({ - title: 'Job Cancelled', - msg: 'The job was cancelled while starting up.', - }); + toastSuccess({ title: 'Job Cancelled', msg: 'The job was cancelled while starting up.' }); } else { - toastSuccess({ - title: 'Schedule Triggered', - msg: `Successfully triggered schedule. New session ID: ${newSessionId}`, - }); + toastSuccess({ title: 'Schedule Triggered', msg: `New session: ${newSessionId}` }); } - setTimeout(() => { - if (scheduleId) { - fetchScheduleSessions(scheduleId); - fetchScheduleDetails(scheduleId); - } - }, 1000); + await fetchSessions(scheduleId); + await fetchSchedule(scheduleId); } catch (err) { - console.error('Failed to run schedule now:', err); - const errorMsg = err instanceof Error ? err.message : 'Failed to trigger schedule'; - toastError({ title: 'Run Schedule Error', msg: errorMsg }); - } finally { - setRunNowLoading(false); - } - }; - - const handlePauseSchedule = async () => { - if (!scheduleId) return; - setPauseUnpauseLoading(true); - try { - await pauseSchedule(scheduleId); - toastSuccess({ - title: 'Schedule Paused', - msg: `Successfully paused schedule "${scheduleId}"`, + toastError({ + title: 'Run Schedule Error', + msg: err instanceof Error ? err.message : 'Failed to trigger schedule', }); - fetchScheduleDetails(scheduleId); - } catch (err) { - console.error('Failed to pause schedule:', err); - const errorMsg = err instanceof Error ? 
err.message : 'Failed to pause schedule'; - toastError({ title: 'Pause Schedule Error', msg: errorMsg }); } finally { - setPauseUnpauseLoading(false); + setIsActionLoading(false); } }; - const handleUnpauseSchedule = async () => { - if (!scheduleId) return; - setPauseUnpauseLoading(true); + const handlePauseToggle = async () => { + if (!scheduleId || !scheduleDetails) return; + setIsActionLoading(true); try { - await unpauseSchedule(scheduleId); - toastSuccess({ - title: 'Schedule Unpaused', - msg: `Successfully unpaused schedule "${scheduleId}"`, - }); - fetchScheduleDetails(scheduleId); + if (scheduleDetails.paused) { + await unpauseSchedule(scheduleId); + toastSuccess({ title: 'Schedule Unpaused', msg: `Unpaused "${scheduleId}"` }); + } else { + await pauseSchedule(scheduleId); + toastSuccess({ title: 'Schedule Paused', msg: `Paused "${scheduleId}"` }); + } + await fetchSchedule(scheduleId); } catch (err) { - console.error('Failed to unpause schedule:', err); - const errorMsg = err instanceof Error ? err.message : 'Failed to unpause schedule'; - toastError({ title: 'Unpause Schedule Error', msg: errorMsg }); + toastError({ + title: 'Pause/Unpause Error', + msg: err instanceof Error ? err.message : 'Operation failed', + }); } finally { - setPauseUnpauseLoading(false); + setIsActionLoading(false); } }; - const handleOpenEditModal = () => { - setEditApiError(null); - setIsEditModalOpen(true); - }; - - const handleCloseEditModal = () => { - setIsEditModalOpen(false); - setEditApiError(null); - }; - - const handleKillRunningJob = async () => { + const handleKill = async () => { if (!scheduleId) return; - setKillJobLoading(true); + setIsActionLoading(true); try { const result = await killRunningJob(scheduleId); - toastSuccess({ - title: 'Job Killed', - msg: result.message, - }); - // Mark that we explicitly killed this job - setJobWasKilled(true); - // Clear the runNowLoading state immediately when job is killed - setRunNowLoading(false); - fetchScheduleDetails(scheduleId); + toastSuccess({ title: 'Job Killed', msg: result.message }); + await fetchSchedule(scheduleId); } catch (err) { - console.error('Failed to kill running job:', err); - const errorMsg = err instanceof Error ? err.message : 'Failed to kill running job'; - toastError({ title: 'Kill Job Error', msg: errorMsg }); + toastError({ + title: 'Kill Job Error', + msg: err instanceof Error ? err.message : 'Failed to kill job', + }); } finally { - setKillJobLoading(false); + setIsActionLoading(false); } }; - const handleInspectRunningJob = async () => { + const handleInspect = async () => { if (!scheduleId) return; - setInspectJobLoading(true); + setIsActionLoading(true); try { const result = await inspectRunningJob(scheduleId); if (result.sessionId) { @@ -339,130 +172,62 @@ const ScheduleDetailView: React.FC = ({ scheduleId, onN msg: `Session: ${result.sessionId}\nRunning for: ${duration}`, }); } else { - toastSuccess({ - title: 'Job Inspection', - msg: 'No detailed information available for this job', - }); + toastSuccess({ title: 'Job Inspection', msg: 'No detailed information available' }); } } catch (err) { - console.error('Failed to inspect running job:', err); - const errorMsg = err instanceof Error ? err.message : 'Failed to inspect running job'; - toastError({ title: 'Inspect Job Error', msg: errorMsg }); + toastError({ + title: 'Inspect Job Error', + msg: err instanceof Error ? 
err.message : 'Failed to inspect job', + }); } finally { - setInspectJobLoading(false); + setIsActionLoading(false); } }; - const handleEditScheduleSubmit = async (cron: string) => { + const handleModalSubmit = async (payload: NewSchedulePayload | string) => { if (!scheduleId) return; - - setIsEditSubmitting(true); - setEditApiError(null); + setIsActionLoading(true); try { - await updateSchedule(scheduleId, cron); - toastSuccess({ - title: 'Schedule Updated', - msg: `Successfully updated schedule "${scheduleId}"`, - }); - fetchScheduleDetails(scheduleId); - setIsEditModalOpen(false); + await updateSchedule(scheduleId, payload as string); + toastSuccess({ title: 'Schedule Updated', msg: `Updated "${scheduleId}"` }); + await fetchSchedule(scheduleId); + setIsModalOpen(false); } catch (err) { - console.error('Failed to update schedule:', err); - const errorMsg = err instanceof Error ? err.message : 'Failed to update schedule'; - setEditApiError(errorMsg); - toastError({ title: 'Update Schedule Error', msg: errorMsg }); + toastError({ + title: 'Update Schedule Error', + msg: err instanceof Error ? err.message : 'Failed to update schedule', + }); } finally { - setIsEditSubmitting(false); + setIsActionLoading(false); } }; - // Optimized periodic refresh for schedule details to keep the running status up to date - useEffect(() => { - if (!scheduleId) return; - - // Initial fetch - fetchScheduleDetails(scheduleId); - - // Set up periodic refresh every 8 seconds (longer to reduce flashing) - const intervalId = setInterval(() => { - if ( - scheduleId && - !selectedSessionDetails && - !runNowLoading && - !pauseUnpauseLoading && - !killJobLoading && - !inspectJobLoading && - !isEditSubmitting - ) { - fetchScheduleDetails(scheduleId, true); // Pass true to indicate this is a refresh - } - }, 8000); - - // Clean up on unmount or when scheduleId changes - return () => { - clearInterval(intervalId); - }; - }, [ - scheduleId, - fetchScheduleDetails, - selectedSessionDetails, - runNowLoading, - pauseUnpauseLoading, - killJobLoading, - inspectJobLoading, - isEditSubmitting, - ]); - - // Monitor schedule state changes and reset loading states appropriately - useEffect(() => { - if (scheduleDetails) { - // Only reset runNowLoading if we explicitly killed the job - // This prevents interfering with natural job completion - if (!scheduleDetails.currently_running && runNowLoading && jobWasKilled) { - setRunNowLoading(false); - setJobWasKilled(false); // Reset the flag - } - } - }, [scheduleDetails, runNowLoading, jobWasKilled]); - - const loadAndShowSessionDetails = async (sessionId: string) => { - setIsLoadingSessionDetails(true); - setSessionDetailsError(null); - setSelectedSessionDetails(null); + const loadSession = async (sessionId: string) => { + setIsLoadingSession(true); + setSessionError(null); try { const response = await getSession({ path: { session_id: sessionId }, throwOnError: true, }); - setSelectedSessionDetails(response.data); + setSelectedSession(response.data); } catch (err) { - console.error(`Failed to load session details for ${sessionId}:`, err); - const errorMsg = err instanceof Error ? err.message : 'Failed to load session details.'; - setSessionDetailsError(errorMsg); - toastError({ - title: 'Failed to load session details', - msg: errorMsg, - }); + const msg = err instanceof Error ? 
err.message : 'Failed to load session'; + setSessionError(msg); + toastError({ title: 'Failed to load session', msg }); } finally { - setIsLoadingSessionDetails(false); + setIsLoadingSession(false); } }; - const handleSessionCardClick = (sessionIdFromCard: string) => { - loadAndShowSessionDetails(sessionIdFromCard); - }; - - if (selectedSessionDetails) { + if (selectedSession) { return ( { - setSelectedSessionDetails(null); - setSessionDetailsError(null); - }} - onRetry={() => loadAndShowSessionDetails(selectedSessionDetails?.id)} + session={selectedSession} + isLoading={isLoadingSession} + error={sessionError} + onBack={() => setSelectedSession(null)} + onRetry={() => loadSession(selectedSession.id)} showActionButtons={true} /> ); @@ -473,13 +238,21 @@ const ScheduleDetailView: React.FC = ({ scheduleId, onN

Schedule Not Found

-

- No schedule ID was provided. Please return to the schedules list and select a schedule. -

+

No schedule ID provided. Return to schedules list.

); } + const readableCron = scheduleDetails + ? (() => { + try { + return cronstrue.toString(scheduleDetails.cron); + } catch { + return scheduleDetails.cron; + } + })() + : ''; + return (
@@ -494,7 +267,7 @@ const ScheduleDetailView: React.FC = ({ scheduleId, onN

Schedule Information

{isLoadingSchedule && (
- Loading schedule details... + Loading schedule...
)} {scheduleError && ( @@ -502,8 +275,55 @@ const ScheduleDetailView: React.FC = ({ scheduleId, onN Error: {scheduleError}

)} - {!isLoadingSchedule && !scheduleError && scheduleDetails && ( - + {scheduleDetails && ( + +
+
+

+ {scheduleDetails.id} +

+
+ {scheduleDetails.currently_running && ( +
+ + Currently Running +
+ )} + {scheduleDetails.paused && ( +
+ + Paused +
+ )} +
+
+

+ Schedule: {readableCron} +

+

+ Cron Expression: {scheduleDetails.cron} +

+

+ Recipe Source: {scheduleDetails.source} +

+

+ Last Run:{' '} + {formatToLocalDateWithTimezone(scheduleDetails.last_run)} +

+ {scheduleDetails.currently_running && scheduleDetails.current_session_id && ( +

+ Current Session:{' '} + {scheduleDetails.current_session_id} +

+ )} + {scheduleDetails.currently_running && scheduleDetails.process_start_time && ( +

+ Process Started:{' '} + {formatToLocalDateWithTimezone(scheduleDetails.process_start_time)} +

+ )} +
+
)} @@ -512,67 +332,67 @@ const ScheduleDetailView: React.FC = ({ scheduleId, onN
{scheduleDetails && !scheduleDetails.currently_running && ( <> )} - {scheduleDetails && scheduleDetails.currently_running && ( + {scheduleDetails?.currently_running && ( <> )} @@ -593,41 +413,32 @@ const ScheduleDetailView: React.FC = ({ scheduleId, onN
-

- Recent Sessions for this Schedule -

+

Recent Sessions

{isLoadingSessions &&

Loading sessions...

} {sessionsError && (

Error: {sessionsError}

)} - {!isLoadingSessions && !sessionsError && sessions.length === 0 && ( + {!isLoadingSessions && sessions.length === 0 && (

No sessions found for this schedule.

)} - {!isLoadingSessions && sessions.length > 0 && ( + {sessions.length > 0 && (
{sessions.map((session) => ( handleSessionCardClick(session.id)} - role="button" - tabIndex={0} - onKeyPress={(e) => { - if (e.key === 'Enter' || e.key === ' ') { - handleSessionCardClick(session.id); - } - }} + onClick={() => loadSession(session.id)} >

- {session.name || `Session ID: ${session.id}`}{' '} + {session.name || `Session ID: ${session.id}`}

Created:{' '} @@ -662,13 +473,15 @@ const ScheduleDetailView: React.FC = ({ scheduleId, onN

- setIsModalOpen(false)} + onSubmit={handleModalSubmit} schedule={scheduleDetails} - isLoadingExternally={isEditSubmitting} - apiErrorExternally={editApiError} + isLoadingExternally={isActionLoading} + apiErrorExternally={null} + initialDeepLink={null} />
); diff --git a/ui/desktop/src/components/schedule/ScheduleModal.tsx b/ui/desktop/src/components/schedule/ScheduleModal.tsx new file mode 100644 index 000000000000..e6c25935ba26 --- /dev/null +++ b/ui/desktop/src/components/schedule/ScheduleModal.tsx @@ -0,0 +1,502 @@ +import React, { useState, useEffect, FormEvent, useCallback } from 'react'; +import { Card } from '../ui/card'; +import { Button } from '../ui/button'; +import { Input } from '../ui/input'; +import { ScheduledJob } from '../../schedule'; +import { CronPicker } from './CronPicker'; +import { Recipe, decodeRecipe } from '../../recipe'; +import { getStorageDirectory } from '../../recipe/recipe_management'; +import ClockIcon from '../../assets/clock-icon.svg'; +import * as yaml from 'yaml'; + +export interface NewSchedulePayload { + id: string; + recipe_source: string; + cron: string; + execution_mode?: string; +} + +interface ScheduleModalProps { + isOpen: boolean; + onClose: () => void; + onSubmit: (payload: NewSchedulePayload | string) => Promise; + schedule: ScheduledJob | null; + isLoadingExternally: boolean; + apiErrorExternally: string | null; + initialDeepLink: string | null; +} + +type SourceType = 'file' | 'deeplink'; + +interface CleanExtension { + name: string; + type: 'stdio' | 'sse' | 'builtin' | 'frontend' | 'streamable_http'; + cmd?: string; + args?: string[]; + uri?: string; + display_name?: string; + tools?: unknown[]; + instructions?: string; + env_keys?: string[]; + timeout?: number; + description?: string; + bundled?: boolean; +} + +interface CleanRecipe { + title: string; + description: string; + instructions?: string; + prompt?: string; + activities?: string[]; + extensions?: CleanExtension[]; + author?: { + contact?: string; + metadata?: string; + }; + schedule?: { + window_title?: string; + working_directory?: string; + }; +} + +async function parseDeepLink(deepLink: string): Promise { + try { + const url = new URL(deepLink); + if (url.protocol !== 'goose:' || (url.hostname !== 'bot' && url.hostname !== 'recipe')) { + return null; + } + + const recipeParam = url.searchParams.get('config'); + if (!recipeParam) { + return null; + } + + return await decodeRecipe(recipeParam); + } catch (error) { + console.error('Failed to parse deep link:', error); + return null; + } +} + +function recipeToYaml(recipe: Recipe): string { + const cleanRecipe: CleanRecipe = { + title: recipe.title, + description: recipe.description, + }; + + if (recipe.instructions) { + cleanRecipe.instructions = recipe.instructions; + } + + if (recipe.prompt) { + cleanRecipe.prompt = recipe.prompt; + } + + if (recipe.activities && recipe.activities.length > 0) { + cleanRecipe.activities = recipe.activities; + } + + if (recipe.extensions && recipe.extensions.length > 0) { + cleanRecipe.extensions = recipe.extensions.map((ext) => { + const cleanExt: CleanExtension = { + name: ext.name, + type: 'builtin', + }; + + if ('type' in ext && ext.type) { + cleanExt.type = ext.type as CleanExtension['type']; + + const extAny = ext as Record; + + if (ext.type === 'sse' && extAny.uri) { + cleanExt.uri = extAny.uri as string; + } else if (ext.type === 'streamable_http' && extAny.uri) { + cleanExt.uri = extAny.uri as string; + } else if (ext.type === 'stdio') { + if (extAny.cmd) { + cleanExt.cmd = extAny.cmd as string; + } + if (extAny.args) { + cleanExt.args = extAny.args as string[]; + } + } else if (ext.type === 'builtin' && extAny.display_name) { + cleanExt.display_name = extAny.display_name as string; + } + + if ((ext.type as string) === 'frontend') { + 
if (extAny.tools) { + cleanExt.tools = extAny.tools as unknown[]; + } + if (extAny.instructions) { + cleanExt.instructions = extAny.instructions as string; + } + } + } else { + const extAny = ext as Record; + + if (extAny.cmd) { + cleanExt.type = 'stdio'; + cleanExt.cmd = extAny.cmd as string; + if (extAny.args) { + cleanExt.args = extAny.args as string[]; + } + } else if (extAny.command) { + cleanExt.type = 'stdio'; + cleanExt.cmd = extAny.command as string; + } else if (extAny.uri) { + cleanExt.type = 'streamable_http'; + cleanExt.uri = extAny.uri as string; + } else if (extAny.tools) { + cleanExt.type = 'frontend'; + cleanExt.tools = extAny.tools as unknown[]; + if (extAny.instructions) { + cleanExt.instructions = extAny.instructions as string; + } + } else { + cleanExt.type = 'builtin'; + } + } + + if ('env_keys' in ext && ext.env_keys && ext.env_keys.length > 0) { + cleanExt.env_keys = ext.env_keys; + } + + if ('timeout' in ext && ext.timeout) { + cleanExt.timeout = ext.timeout as number; + } + + if ('description' in ext && ext.description) { + cleanExt.description = ext.description as string; + } + + if ('bundled' in ext && ext.bundled !== undefined) { + cleanExt.bundled = ext.bundled as boolean; + } + + return cleanExt; + }); + } + + if (recipe.author) { + cleanRecipe.author = { + contact: recipe.author.contact || undefined, + metadata: recipe.author.metadata || undefined, + }; + } + + cleanRecipe.schedule = { + window_title: `${recipe.title} - Scheduled`, + }; + + return yaml.stringify(cleanRecipe); +} + +const modalLabelClassName = 'block text-sm font-medium text-text-prominent mb-1'; + +export const ScheduleModal: React.FC = ({ + isOpen, + onClose, + onSubmit, + schedule, + isLoadingExternally, + apiErrorExternally, + initialDeepLink, +}) => { + const isEditMode = !!schedule; + + const [scheduleId, setScheduleId] = useState(''); + const [sourceType, setSourceType] = useState('file'); + const [recipeSourcePath, setRecipeSourcePath] = useState(''); + const [deepLinkInput, setDeepLinkInput] = useState(''); + const [parsedRecipe, setParsedRecipe] = useState(null); + const [cronExpression, setCronExpression] = useState('0 0 14 * * *'); + const [internalValidationError, setInternalValidationError] = useState(null); + + const handleDeepLinkChange = useCallback(async (value: string) => { + setDeepLinkInput(value); + setInternalValidationError(null); + + if (value.trim()) { + try { + const recipe = await parseDeepLink(value.trim()); + if (recipe) { + setParsedRecipe(recipe); + if (recipe.title) { + const cleanId = recipe.title + .toLowerCase() + .replace(/[^a-z0-9-]/g, '-') + .replace(/-+/g, '-'); + setScheduleId(cleanId); + } + } else { + setParsedRecipe(null); + setInternalValidationError( + 'Invalid deep link format. Please use a goose://bot or goose://recipe link.' + ); + } + } catch { + setParsedRecipe(null); + setInternalValidationError( + 'Failed to parse deep link. Please ensure using a goose://bot or goose://recipe link and try again.' 
+ ); + } + } else { + setParsedRecipe(null); + } + }, []); + + useEffect(() => { + if (isOpen) { + if (schedule) { + setScheduleId(schedule.id); + setCronExpression(schedule.cron); + } else { + setScheduleId(''); + setSourceType('file'); + setRecipeSourcePath(''); + setDeepLinkInput(''); + setParsedRecipe(null); + setCronExpression('0 0 14 * * *'); + setInternalValidationError(null); + if (initialDeepLink) { + setSourceType('deeplink'); + handleDeepLinkChange(initialDeepLink); + } + } + } + }, [isOpen, schedule, initialDeepLink, handleDeepLinkChange]); + + const handleBrowseFile = async () => { + const defaultPath = getStorageDirectory(true); + const filePath = await window.electron.selectFileOrDirectory(defaultPath); + if (filePath) { + if (filePath.endsWith('.yaml') || filePath.endsWith('.yml')) { + setRecipeSourcePath(filePath); + setInternalValidationError(null); + } else { + setInternalValidationError('Invalid file type: Please select a YAML file (.yaml or .yml)'); + } + } + }; + + const handleLocalSubmit = async (event: FormEvent) => { + event.preventDefault(); + setInternalValidationError(null); + + if (isEditMode) { + await onSubmit(cronExpression); + return; + } + + if (!scheduleId.trim()) { + setInternalValidationError('Schedule ID is required.'); + return; + } + + let finalRecipeSource = ''; + + if (sourceType === 'file') { + if (!recipeSourcePath) { + setInternalValidationError('Recipe source file is required.'); + return; + } + finalRecipeSource = recipeSourcePath; + } else if (sourceType === 'deeplink') { + if (!deepLinkInput.trim()) { + setInternalValidationError('Deep link is required.'); + return; + } + if (!parsedRecipe) { + setInternalValidationError('Invalid deep link. Please check the format.'); + return; + } + + try { + const yamlContent = recipeToYaml(parsedRecipe); + const tempFileName = `schedule-${scheduleId}-${Date.now()}.yaml`; + const tempDir = window.electron.getConfig().GOOSE_WORKING_DIR || '.'; + const tempFilePath = `${tempDir}/${tempFileName}`; + + const writeSuccess = await window.electron.writeFile(tempFilePath, yamlContent); + if (!writeSuccess) { + setInternalValidationError('Failed to create temporary recipe file.'); + return; + } + + finalRecipeSource = tempFilePath; + } catch (error) { + console.error('Failed to convert recipe to YAML:', error); + setInternalValidationError('Failed to process the recipe from deep link.'); + return; + } + } + + const newSchedulePayload: NewSchedulePayload = { + id: scheduleId.trim(), + recipe_source: finalRecipeSource, + cron: cronExpression, + }; + + await onSubmit(newSchedulePayload); + }; + + if (!isOpen) return null; + + return ( +
+ +
+
+ Clock +
+

+ {isEditMode ? 'Edit Schedule' : 'Create New Schedule'} +

+ {isEditMode &&

{schedule.id}

} +
+
+
+ +
+ {apiErrorExternally && ( +

+ {apiErrorExternally} +

+ )} + {internalValidationError && ( +

+ {internalValidationError} +

+ )} + + {!isEditMode && ( + <> +
+ + setScheduleId(e.target.value)} + placeholder="e.g., daily-summary-job" + required + /> +
+ +
+ +
+
+ + +
+ + {sourceType === 'file' && ( +
+ + {recipeSourcePath && ( +

+ Selected: {recipeSourcePath} +

+ )} +
+ )} + + {sourceType === 'deeplink' && ( +
+ handleDeepLinkChange(e.target.value)} + placeholder="Paste goose://bot or goose://recipe link here..." + className="rounded-full" + /> + {parsedRecipe && ( +
+

+ ✓ Recipe parsed successfully +

+

+ Title: {parsedRecipe.title} +

+

+ Description: {parsedRecipe.description} +

+
+ )} +
+ )} +
+
+ + )} + +
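The deep-link option above relies on parseDeepLink, which only accepts goose://bot or goose://recipe URLs carrying a config query parameter before handing that parameter to decodeRecipe. A standalone sketch of just the URL check, for reference (illustrative only, it does not decode anything):

// Illustrative: mirrors the URL validation in parseDeepLink above, without the decodeRecipe step.
// Returns the raw `config` query parameter, or null if the link is not a goose deep link.
function extractRecipeConfigParam(deepLink: string): string | null {
  try {
    const url = new URL(deepLink);
    const isGooseLink =
      url.protocol === 'goose:' && (url.hostname === 'bot' || url.hostname === 'recipe');
    if (!isGooseLink) return null;
    return url.searchParams.get('config');
  } catch {
    return null; // not a parseable URL at all
  }
}

// e.g. extractRecipeConfigParam('goose://recipe?config=abc123') yields 'abc123'.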
+ + +
+
+ +
+ + +
+
+
+ ); +}; diff --git a/ui/desktop/src/components/schedule/SchedulesView.tsx b/ui/desktop/src/components/schedule/SchedulesView.tsx index 885bf1ff4cc5..818e0df2256a 100644 --- a/ui/desktop/src/components/schedule/SchedulesView.tsx +++ b/ui/desktop/src/components/schedule/SchedulesView.tsx @@ -1,4 +1,4 @@ -import React, { useState, useEffect, useCallback, useMemo } from 'react'; +import React, { useState, useEffect } from 'react'; import { useLocation } from 'react-router-dom'; import { listSchedules, @@ -16,8 +16,7 @@ import { Card } from '../ui/card'; import { Button } from '../ui/button'; import { TrashIcon } from '../icons/TrashIcon'; import { Plus, RefreshCw, Pause, Play, Edit, Square, Eye, CircleDotDashed } from 'lucide-react'; -import { CreateScheduleModal, NewSchedulePayload } from './CreateScheduleModal'; -import { EditScheduleModal } from './EditScheduleModal'; +import { NewSchedulePayload, ScheduleModal } from './ScheduleModal'; import ScheduleDetailView from './ScheduleDetailView'; import { toastError, toastSuccess } from '../../toasts'; import cronstrue from 'cronstrue'; @@ -29,8 +28,7 @@ interface SchedulesViewProps { onClose?: () => void; } -// Memoized ScheduleCard component to prevent unnecessary re-renders -const ScheduleCard = React.memo<{ +const ScheduleCard: React.FC<{ job: ScheduledJob; onNavigateToDetail: (id: string) => void; onEdit: (job: ScheduledJob) => void; @@ -39,177 +37,150 @@ const ScheduleCard = React.memo<{ onKill: (id: string) => void; onInspect: (id: string) => void; onDelete: (id: string) => void; - isPausing: boolean; - isDeleting: boolean; - isKilling: boolean; - isInspecting: boolean; - isSubmitting: boolean; -}>( - ({ - job, - onNavigateToDetail, - onEdit, - onPause, - onUnpause, - onKill, - onInspect, - onDelete, - isPausing, - isDeleting, - isKilling, - isInspecting, - isSubmitting, - }) => { - const readableCron = useMemo(() => { - try { - return cronstrue.toString(job.cron); - } catch (e) { - console.warn(`Could not parse cron string "${job.cron}":`, e); - return job.cron; - } - }, [job.cron]); - - const formattedLastRun = useMemo(() => { - return formatToLocalDateWithTimezone(job.last_run); - }, [job.last_run]); + actionInProgress: boolean; +}> = ({ + job, + onNavigateToDetail, + onEdit, + onPause, + onUnpause, + onKill, + onInspect, + onDelete, + actionInProgress, +}) => { + let readableCron: string; + try { + readableCron = cronstrue.toString(job.cron); + } catch { + readableCron = job.cron; + } - return ( - onNavigateToDetail(job.id)} - > -
-
-
-

- {job.id} -

- {job.execution_mode && ( - - {job.execution_mode === 'foreground' ? '🖥️' : '⚡'} - - )} - {job.currently_running && ( - - - Running - - )} - {job.paused && ( - - - Paused - - )} -
-

- {readableCron} -

-
- Last run: {formattedLastRun} -
-
+ const formattedLastRun = formatToLocalDateWithTimezone(job.last_run); -
- {!job.currently_running && ( - <> - - - - )} + return ( + onNavigateToDetail(job.id)} + > +
+
+
+

+ {job.id} +

{job.currently_running && ( - <> - - - + + + Running + + )} + {job.paused && ( + + + Paused + )} - +
+

+ {readableCron} +

+
+ Last run: {formattedLastRun}
- - ); - } -); -ScheduleCard.displayName = 'ScheduleCard'; +
+ {!job.currently_running && ( + <> + + + + )} + {job.currently_running && ( + <> + + + + )} + +
+
+
+ ); +}; const SchedulesView: React.FC = ({ onClose: _onClose }) => { const location = useLocation(); @@ -218,33 +189,19 @@ const SchedulesView: React.FC = ({ onClose: _onClose }) => { const [isSubmitting, setIsSubmitting] = useState(false); const [apiError, setApiError] = useState(null); const [submitApiError, setSubmitApiError] = useState(null); - const [isCreateModalOpen, setIsCreateModalOpen] = useState(false); - const [isEditModalOpen, setIsEditModalOpen] = useState(false); + const [isModalOpen, setIsModalOpen] = useState(false); const [editingSchedule, setEditingSchedule] = useState(null); const [isRefreshing, setIsRefreshing] = useState(false); const [pendingDeepLink, setPendingDeepLink] = useState(null); - - // Individual loading states for each action to prevent double-clicks - const [pausingScheduleIds, setPausingScheduleIds] = useState>(new Set()); - const [deletingScheduleIds, setDeletingScheduleIds] = useState>(new Set()); - const [killingScheduleIds, setKillingScheduleIds] = useState>(new Set()); - const [inspectingScheduleIds, setInspectingScheduleIds] = useState>(new Set()); - + const [actionsInProgress, setActionsInProgress] = useState>(new Set()); const [viewingScheduleId, setViewingScheduleId] = useState(null); - // Memoized fetch function to prevent unnecessary re-creation - const fetchSchedules = useCallback(async (isRefresh = false) => { - if (!isRefresh) setIsLoading(true); + const fetchSchedules = async () => { + setIsLoading(true); setApiError(null); try { const fetchedSchedules = await listSchedules(); - setSchedules((prevSchedules) => { - // Only update if schedules actually changed to prevent unnecessary re-renders - if (JSON.stringify(prevSchedules) !== JSON.stringify(fetchedSchedules)) { - return fetchedSchedules; - } - return prevSchedules; - }); + setSchedules(fetchedSchedules); } catch (error) { console.error('Failed to fetch schedules:', error); setApiError( @@ -253,266 +210,181 @@ const SchedulesView: React.FC = ({ onClose: _onClose }) => { : 'An unknown error occurred while fetching schedules.' 
); } finally { - if (!isRefresh) setIsLoading(false); + setIsLoading(false); } - }, []); + }; useEffect(() => { if (viewingScheduleId === null) { fetchSchedules(); - // Check for pending deep link from navigation state const locationState = location.state as ViewOptions | null; if (locationState?.pendingScheduleDeepLink) { setPendingDeepLink(locationState.pendingScheduleDeepLink); - setIsCreateModalOpen(true); - // Clear the state after reading it + setIsModalOpen(true); window.history.replaceState({}, document.title); } } - }, [viewingScheduleId, fetchSchedules, location.state]); + }, [viewingScheduleId, location.state]); - // Optimized periodic refresh - only refresh if not actively doing something useEffect(() => { - if (viewingScheduleId !== null) return; + if (viewingScheduleId !== null || actionsInProgress.size > 0) return; - // Set up periodic refresh every 15 seconds (increased from 8 to reduce flashing) const intervalId = setInterval(() => { - if ( - viewingScheduleId === null && - !isRefreshing && - !isLoading && - !isSubmitting && - pausingScheduleIds.size === 0 && - deletingScheduleIds.size === 0 && - killingScheduleIds.size === 0 && - inspectingScheduleIds.size === 0 - ) { - fetchSchedules(true); // Pass true to indicate this is a refresh + if (viewingScheduleId === null && !isRefreshing && !isLoading && !isSubmitting) { + fetchSchedules(); } - }, 15000); // Increased from 8000 to 15000 (15 seconds) - - // Clean up on unmount - return () => { - clearInterval(intervalId); - }; - }, [ - viewingScheduleId, - isRefreshing, - isLoading, - isSubmitting, - pausingScheduleIds.size, - deletingScheduleIds.size, - killingScheduleIds.size, - inspectingScheduleIds.size, - fetchSchedules, - ]); - - const handleOpenCreateModal = () => { - setSubmitApiError(null); - setIsCreateModalOpen(true); - }; + }, 15000); + + return () => clearInterval(intervalId); + }, [viewingScheduleId, isRefreshing, isLoading, isSubmitting, actionsInProgress.size]); - const handleRefresh = useCallback(async () => { + const handleRefresh = async () => { setIsRefreshing(true); try { await fetchSchedules(); } finally { setIsRefreshing(false); } - }, [fetchSchedules]); - - const handleCloseCreateModal = () => { - setIsCreateModalOpen(false); - setSubmitApiError(null); - setPendingDeepLink(null); - }; - - const handleOpenEditModal = (schedule: ScheduledJob) => { - setEditingSchedule(schedule); - setSubmitApiError(null); - setIsEditModalOpen(true); }; - const handleCloseEditModal = () => { - setIsEditModalOpen(false); - setEditingSchedule(null); - setSubmitApiError(null); - }; - - const handleCreateScheduleSubmit = async (payload: NewSchedulePayload) => { + const handleModalSubmit = async (payload: NewSchedulePayload | string) => { setIsSubmitting(true); setSubmitApiError(null); try { - await createSchedule(payload); - await fetchSchedules(); - setIsCreateModalOpen(false); - } catch (error) { - console.error('Failed to create schedule:', error); - const errorMessage = - error instanceof Error ? 
error.message : 'Unknown error creating schedule.'; - setSubmitApiError(errorMessage); - } finally { - setIsSubmitting(false); - } - }; - - const handleEditScheduleSubmit = async (cron: string) => { - if (!editingSchedule) return; - - setIsSubmitting(true); - setSubmitApiError(null); - try { - await updateSchedule(editingSchedule.id, cron); - toastSuccess({ - title: 'Schedule Updated', - msg: `Successfully updated schedule "${editingSchedule.id}"`, - }); + if (editingSchedule) { + await updateSchedule(editingSchedule.id, payload as string); + toastSuccess({ + title: 'Schedule Updated', + msg: `Successfully updated schedule "${editingSchedule.id}"`, + }); + } else { + await createSchedule(payload as NewSchedulePayload); + } await fetchSchedules(); - setIsEditModalOpen(false); + setIsModalOpen(false); setEditingSchedule(null); } catch (error) { - console.error('Failed to update schedule:', error); - const errorMessage = - error instanceof Error ? error.message : 'Unknown error updating schedule.'; - setSubmitApiError(errorMessage); - toastError({ - title: 'Update Schedule Error', - msg: errorMessage, - }); + console.error('Failed to save schedule:', error); + setSubmitApiError(error instanceof Error ? error.message : 'Unknown error saving schedule.'); } finally { setIsSubmitting(false); } }; - const handleDeleteSchedule = async (idToDelete: string) => { - if (!window.confirm(`Are you sure you want to delete schedule "${idToDelete}"?`)) return; + const handleDeleteSchedule = async (id: string) => { + if (!window.confirm(`Are you sure you want to delete schedule "${id}"?`)) return; - // Immediately add to deleting set to disable button - setDeletingScheduleIds((prev) => new Set(prev).add(idToDelete)); - - if (viewingScheduleId === idToDelete) { - setViewingScheduleId(null); - } + setActionsInProgress((prev) => new Set(prev).add(id)); + if (viewingScheduleId === id) setViewingScheduleId(null); setApiError(null); + try { - await deleteSchedule(idToDelete); + await deleteSchedule(id); await fetchSchedules(); } catch (error) { - console.error(`Failed to delete schedule "${idToDelete}":`, error); - setApiError( - error instanceof Error ? error.message : `Unknown error deleting "${idToDelete}".` - ); + console.error(`Failed to delete schedule "${id}":`, error); + setApiError(error instanceof Error ? error.message : `Unknown error deleting "${id}".`); } finally { - // Remove from deleting set - setDeletingScheduleIds((prev) => { + setActionsInProgress((prev) => { const newSet = new Set(prev); - newSet.delete(idToDelete); + newSet.delete(id); return newSet; }); } }; - const handlePauseSchedule = async (idToPause: string) => { - // Immediately add to pausing set to disable button - setPausingScheduleIds((prev) => new Set(prev).add(idToPause)); - + const handlePauseSchedule = async (id: string) => { + setActionsInProgress((prev) => new Set(prev).add(id)); setApiError(null); + try { - await pauseSchedule(idToPause); + await pauseSchedule(id); toastSuccess({ title: 'Schedule Paused', - msg: `Successfully paused schedule "${idToPause}"`, + msg: `Successfully paused schedule "${id}"`, }); await fetchSchedules(); } catch (error) { - console.error(`Failed to pause schedule "${idToPause}":`, error); - const errorMsg = - error instanceof Error ? error.message : `Unknown error pausing "${idToPause}".`; + console.error(`Failed to pause schedule "${id}":`, error); + const errorMsg = error instanceof Error ? 
error.message : `Unknown error pausing "${id}".`; setApiError(errorMsg); toastError({ title: 'Pause Schedule Error', msg: errorMsg, }); } finally { - // Remove from pausing set - setPausingScheduleIds((prev) => { + setActionsInProgress((prev) => { const newSet = new Set(prev); - newSet.delete(idToPause); + newSet.delete(id); return newSet; }); } }; - const handleUnpauseSchedule = async (idToUnpause: string) => { - // Immediately add to pausing set to disable button - setPausingScheduleIds((prev) => new Set(prev).add(idToUnpause)); - + const handleUnpauseSchedule = async (id: string) => { + setActionsInProgress((prev) => new Set(prev).add(id)); setApiError(null); + try { - await unpauseSchedule(idToUnpause); + await unpauseSchedule(id); toastSuccess({ title: 'Schedule Unpaused', - msg: `Successfully unpaused schedule "${idToUnpause}"`, + msg: `Successfully unpaused schedule "${id}"`, }); await fetchSchedules(); } catch (error) { - console.error(`Failed to unpause schedule "${idToUnpause}":`, error); - const errorMsg = - error instanceof Error ? error.message : `Unknown error unpausing "${idToUnpause}".`; + console.error(`Failed to unpause schedule "${id}":`, error); + const errorMsg = error instanceof Error ? error.message : `Unknown error unpausing "${id}".`; setApiError(errorMsg); toastError({ title: 'Unpause Schedule Error', msg: errorMsg, }); } finally { - // Remove from pausing set - setPausingScheduleIds((prev) => { + setActionsInProgress((prev) => { const newSet = new Set(prev); - newSet.delete(idToUnpause); + newSet.delete(id); return newSet; }); } }; - const handleKillRunningJob = async (scheduleId: string) => { - // Immediately add to killing set to disable button - setKillingScheduleIds((prev) => new Set(prev).add(scheduleId)); - + const handleKillRunningJob = async (id: string) => { + setActionsInProgress((prev) => new Set(prev).add(id)); setApiError(null); + try { - const result = await killRunningJob(scheduleId); + const result = await killRunningJob(id); toastSuccess({ title: 'Job Killed', msg: result.message, }); await fetchSchedules(); } catch (error) { - console.error(`Failed to kill running job "${scheduleId}":`, error); + console.error(`Failed to kill running job "${id}":`, error); const errorMsg = - error instanceof Error ? error.message : `Unknown error killing job "${scheduleId}".`; + error instanceof Error ? error.message : `Unknown error killing job "${id}".`; setApiError(errorMsg); toastError({ title: 'Kill Job Error', msg: errorMsg, }); } finally { - // Remove from killing set - setKillingScheduleIds((prev) => { + setActionsInProgress((prev) => { const newSet = new Set(prev); - newSet.delete(scheduleId); + newSet.delete(id); return newSet; }); } }; - const handleInspectRunningJob = async (scheduleId: string) => { - // Immediately add to inspecting set to disable button - setInspectingScheduleIds((prev) => new Set(prev).add(scheduleId)); - + const handleInspectRunningJob = async (id: string) => { + setActionsInProgress((prev) => new Set(prev).add(id)); setApiError(null); + try { - const result = await inspectRunningJob(scheduleId); + const result = await inspectRunningJob(id); if (result.sessionId) { const duration = result.runningDurationSeconds ? 
`${Math.floor(result.runningDurationSeconds / 60)}m ${result.runningDurationSeconds % 60}s` @@ -528,37 +400,28 @@ const SchedulesView: React.FC = ({ onClose: _onClose }) => { }); } } catch (error) { - console.error(`Failed to inspect running job "${scheduleId}":`, error); + console.error(`Failed to inspect running job "${id}":`, error); const errorMsg = - error instanceof Error ? error.message : `Unknown error inspecting job "${scheduleId}".`; + error instanceof Error ? error.message : `Unknown error inspecting job "${id}".`; setApiError(errorMsg); toastError({ title: 'Inspect Job Error', msg: errorMsg, }); } finally { - // Remove from inspecting set - setInspectingScheduleIds((prev) => { + setActionsInProgress((prev) => { const newSet = new Set(prev); - newSet.delete(scheduleId); + newSet.delete(id); return newSet; }); } }; - const handleNavigateToScheduleDetail = (scheduleId: string) => { - setViewingScheduleId(scheduleId); - }; - - const handleNavigateBackFromDetail = () => { - setViewingScheduleId(null); - }; - if (viewingScheduleId) { return ( setViewingScheduleId(null)} /> ); } @@ -583,7 +446,10 @@ const SchedulesView: React.FC = ({ onClose: _onClose }) => { {isRefreshing ? 'Refreshing...' : 'Refresh'}
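The handlers above collapse the previous per-action Sets (pausing, deleting, killing, inspecting) into a single actionsInProgress Set keyed by schedule id, with the same add-before/delete-after shape in every handler. That pattern can be read in isolation as a small hypothetical hook (the diff itself keeps the logic inline rather than in a hook):

import { useState } from 'react';

// Hypothetical wrapper around the in-flight-action Set used above: add the id before
// the async call, remove it afterwards, so related buttons can be disabled via has(id).
function useActionTracker() {
  const [actionsInProgress, setActionsInProgress] = useState<Set<string>>(new Set());

  const track = async (id: string, action: () => Promise<void>) => {
    setActionsInProgress((prev) => new Set(prev).add(id));
    try {
      await action();
    } finally {
      setActionsInProgress((prev) => {
        const next = new Set(prev);
        next.delete(id);
        return next;
      });
    }
  };

  return { actionsInProgress, track };
}

With that shape, a handler such as handlePauseSchedule reduces to track(id, () => pauseSchedule(id)), and a card button can be disabled with actionsInProgress.has(job.id), which is presumably how the actionInProgress prop on ScheduleCard is computed.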
@@ -648,21 +514,19 @@ const SchedulesView: React.FC = ({ onClose: _onClose }) => {
- - { + setIsModalOpen(false); + setEditingSchedule(null); + setSubmitApiError(null); + setPendingDeepLink(null); + }} + onSubmit={handleModalSubmit} schedule={editingSchedule} isLoadingExternally={isSubmitting} apiErrorExternally={submitApiError} + initialDeepLink={pendingDeepLink} /> ); diff --git a/ui/desktop/src/schedule.ts b/ui/desktop/src/schedule.ts index 32ecfcbfae79..460822e2768c 100644 --- a/ui/desktop/src/schedule.ts +++ b/ui/desktop/src/schedule.ts @@ -9,6 +9,7 @@ import { runNowHandler as apiRunScheduleNow, killRunningJob as apiKillRunningJob, inspectRunningJob as apiInspectRunningJob, + SessionDisplayInfo, } from './api'; export interface ScheduledJob { @@ -20,7 +21,6 @@ export interface ScheduledJob { paused?: boolean; current_session_id?: string | null; process_start_time?: string | null; - execution_mode?: string | null; // "foreground" or "background" } export interface ScheduleSession { @@ -82,23 +82,15 @@ export async function deleteSchedule(id: string): Promise { export async function getScheduleSessions( scheduleId: string, - limit?: number -): Promise { - try { - const response = await apiGetScheduleSessions({ - path: { id: scheduleId }, - query: { limit }, - }); - - if (response && response.data) { - return response.data as ScheduleSession[]; - } - console.error('Unexpected response format from apiGetScheduleSessions', response); - throw new Error('Failed to get schedule sessions: Unexpected response format'); - } catch (error) { - console.error(`Error fetching sessions for schedule ${scheduleId}:`, error); - throw error; - } + limit: number +): Promise> { + const response = await apiGetScheduleSessions({ + path: { id: scheduleId }, + query: { limit }, + throwOnError: true, + }); + + return response.data; } export async function runScheduleNow(scheduleId: string): Promise {
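With the schedule.ts change above, getScheduleSessions now takes a required limit, uses throwOnError instead of hand-rolled response checking, and returns response.data directly (presumably SessionDisplayInfo[], given the added import). A brief usage sketch from a hypothetical sibling module, under those assumptions:

import { getScheduleSessions } from './schedule';

// Illustrative caller: the limit argument is now mandatory, and failures surface as
// thrown errors from the generated client rather than a logged sentinel value.
async function loadRecentSessions(scheduleId: string) {
  try {
    const sessions = await getScheduleSessions(scheduleId, 20);
    console.log(`Fetched ${sessions.length} session(s) for "${scheduleId}"`);
    return sessions;
  } catch (err) {
    console.error(`Failed to fetch sessions for schedule ${scheduleId}:`, err);
    throw err;
  }
}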