Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
34 changes: 34 additions & 0 deletions crates/goose-server/src/routes/agent.rs
Original file line number Diff line number Diff line change
Expand Up @@ -67,6 +67,11 @@ pub struct StartAgentRequest {
recipe_deeplink: Option<String>,
}

/// Request body for `POST /agent/stop`.
///
/// Carries the identifier of the session whose agent should be torn down
/// by the `stop_agent` handler.
#[derive(Deserialize, utoipa::ToSchema)]
pub struct StopAgentRequest {
    // Identifier of the session to stop; forwarded to
    // `AgentManager::remove_session` by the handler.
    session_id: String,
}

#[derive(Deserialize, utoipa::ToSchema)]
pub struct ResumeAgentRequest {
session_id: String,
Expand Down Expand Up @@ -582,6 +587,34 @@ async fn agent_remove_extension(
Ok(StatusCode::OK)
}

#[utoipa::path(
post,
path = "/agent/stop",
request_body = StopAgentRequest,
responses(
(status = 200, description = "Agent stopped successfully", body = String),
(status = 401, description = "Unauthorized - invalid secret key"),
(status = 404, description = "Session not found"),
(status = 500, description = "Internal server error")
)
)]
async fn stop_agent(
State(state): State<Arc<AppState>>,
Json(payload): Json<StopAgentRequest>,
) -> Result<StatusCode, ErrorResponse> {
let session_id = payload.session_id;
state
.agent_manager
.remove_session(&session_id)
.await
.map_err(|e| ErrorResponse {
message: format!("Failed to stop agent for session {}: {}", session_id, e),
status: StatusCode::NOT_FOUND,
})?;

Ok(StatusCode::OK)
}

pub fn routes(state: Arc<AppState>) -> Router {
Router::new()
.route("/agent/start", post(start_agent))
Expand All @@ -595,5 +628,6 @@ pub fn routes(state: Arc<AppState>) -> Router {
.route("/agent/update_from_session", post(update_from_session))
.route("/agent/add_extension", post(agent_add_extension))
.route("/agent/remove_extension", post(agent_remove_extension))
.route("/agent/stop", post(stop_agent))
.with_state(state)
}
1 change: 1 addition & 0 deletions crates/goose/src/config/base.rs
Original file line number Diff line number Diff line change
Expand Up @@ -807,6 +807,7 @@ config_value!(GOOSE_SEARCH_PATHS, Vec<String>);
config_value!(GOOSE_MODE, GooseMode);
config_value!(GOOSE_PROVIDER, String);
config_value!(GOOSE_MODEL, String);
config_value!(GOOSE_MAX_ACTIVE_AGENTS, usize);

/// Load init-config.yaml from workspace root if it exists.
/// This function is shared between the config recovery and the init_config endpoint.
Expand Down
6 changes: 5 additions & 1 deletion crates/goose/src/execution/manager.rs
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
use crate::agents::extension::PlatformExtensionContext;
use crate::agents::Agent;
use crate::config::paths::Paths;
use crate::config::Config;
use crate::scheduler::Scheduler;
use crate::scheduler_trait::SchedulerTrait;
use anyhow::Result;
Expand Down Expand Up @@ -52,7 +53,10 @@ impl AgentManager {
pub async fn instance() -> Result<Arc<Self>> {
AGENT_MANAGER
.get_or_try_init(|| async {
let manager = Self::new(Some(DEFAULT_MAX_SESSION)).await?;
let max_sessions = Config::global()
.get_goose_max_active_agents()
Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

this is a singleton, so making it depend on a configuration variable only partially works — the value is read once at first initialization and later changes are ignored

more to the point and I think we discussed this previously, I don't think the agent manager should be a cache and definitely not one with max=10 by default - we still have a rumbling subagent problem and that would mean if you start 10 subagents, the main agent dies.

so adding an agent/stop path is the right way, but the clients should either always handle this or the agents should be properly resumed (now it loads the default provider, no extensions etc)

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

so adding an agent/stop path is the right way, but the clients should either always handle this or the agents should be properly resumed (now it loads the default provider, no extensions etc)

I think #5419 resolves some of this

Copy link
Collaborator Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

restored default to 100

Copy link
Collaborator

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think #5419 resolves some of this

it's definitely on the way there. The next step would be to also store the active extensions; then we can move the code from resume_agent into here, make sure everybody goes through the agent manager, and at that point we can kill agents and have them restore themselves after a system restart.

.unwrap_or(DEFAULT_MAX_SESSION);
let manager = Self::new(Some(max_sessions)).await?;
Ok(Arc::new(manager))
})
.await
Expand Down