Files
tori/src/sink.rs
Fam Zheng e4ba385112 refactor: worker mode — server offloads all LLM/exec to worker
- Split into `tori server` / `tori worker` subcommands (clap derive)
- Extract lib.rs for shared crate (agent, llm, exec, state, etc.)
- Introduce AgentUpdate channel to decouple agent loop from DB/broadcast
- New sink.rs: AgentUpdate enum + ServiceManager + handle_agent_updates
- New worker_runner.rs: connects to server WS, runs full agent loop
- Expand worker protocol: ServerToWorker (workflow_assign, comment)
  and WorkerToServer (register, result, update)
- Remove LLM from title generation (heuristic) and template selection
  (must be explicit)
- Remove KB tools (kb_search, kb_read) and remote worker tools
  (list_workers, execute_on_worker) from agent loop
- run_agent_loop/run_step_loop now take mpsc::Sender<AgentUpdate>
  instead of direct DB pool + broadcast sender
2026-04-06 12:54:57 +01:00

240 lines
9.0 KiB
Rust

use std::collections::HashMap;
use std::sync::Arc;
use std::sync::atomic::{AtomicU16, Ordering};
use serde::{Deserialize, Serialize};
use sqlx::sqlite::SqlitePool;
use tokio::sync::{RwLock, broadcast, mpsc};
use crate::agent::{PlanStepInfo, WsMessage, ServiceInfo};
use crate::state::{AgentState, Artifact};
/// All updates produced by the agent loop. This is the single output interface
/// that decouples the agent logic from DB persistence and WebSocket broadcasting.
///
/// Serialized with an adjacent `"kind"` tag so it can also travel over the
/// worker<->server protocol as JSON.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "kind")]
pub enum AgentUpdate {
    /// The plan (ordered list of steps) for a workflow was created or changed.
    PlanUpdate {
        workflow_id: String,
        steps: Vec<PlanStepInfo>,
    },
    /// The workflow's status string changed (persisted to the `workflows` table).
    WorkflowStatus {
        workflow_id: String,
        status: String,
    },
    /// Free-form "what the agent is doing right now" text; broadcast only,
    /// never persisted.
    Activity {
        workflow_id: String,
        activity: String,
    },
    /// One tool invocation finished; appended to the `execution_log` table.
    ExecutionLog {
        workflow_id: String,
        // Which plan step this execution belongs to.
        step_order: i32,
        tool_name: String,
        tool_input: String,
        output: String,
        status: String,
    },
    /// One LLM call completed; appended to the `llm_call_log` table.
    LlmCallLog {
        workflow_id: String,
        step_order: i32,
        // e.g. which phase of the agent loop issued the call.
        phase: String,
        messages_count: i32,
        tools_count: i32,
        tool_calls: String,
        text_response: String,
        // Token counts are optional: not every provider reports usage.
        prompt_tokens: Option<u32>,
        completion_tokens: Option<u32>,
        latency_ms: i64,
    },
    /// Full agent-state checkpoint, stored as JSON in `agent_state_snapshots`.
    StateSnapshot {
        workflow_id: String,
        step_order: i32,
        state: AgentState,
    },
    /// Terminal update for a workflow; `report` is broadcast as ReportReady
    /// when present.
    WorkflowComplete {
        workflow_id: String,
        status: String,
        report: Option<String>,
    },
    /// A step produced a file artifact; recorded in `step_artifacts`.
    ArtifactSave {
        workflow_id: String,
        step_order: i32,
        artifact: Artifact,
    },
    /// The workflow's requirement text was rewritten by the agent.
    RequirementUpdate {
        workflow_id: String,
        requirement: String,
    },
    /// Non-fatal error surfaced to the frontend; not persisted.
    Error {
        message: String,
    },
}
/// Manages local services (start_service / stop_service tools).
/// Created per-worker or per-agent-loop.
pub struct ServiceManager {
    // Running services keyed by name; async lock because tool handlers
    // access it from concurrent tasks.
    pub services: RwLock<HashMap<String, ServiceInfo>>,
    // Monotonically increasing port counter; see `allocate_port`.
    next_port: AtomicU16,
}
impl ServiceManager {
    /// Creates a manager with no running services, handing out ports starting
    /// at `start_port`. Returned in an `Arc` since it is shared across tasks.
    pub fn new(start_port: u16) -> Arc<Self> {
        let manager = Self {
            services: RwLock::new(HashMap::new()),
            next_port: AtomicU16::new(start_port),
        };
        Arc::new(manager)
    }

    /// Hands out the next unused port number. `Relaxed` ordering is enough:
    /// only the atomicity of the increment matters, no other memory is
    /// published alongside it. NOTE(review): wraps silently past u16::MAX.
    pub fn allocate_port(&self) -> u16 {
        self.next_port.fetch_add(1, Ordering::Relaxed)
    }
}
/// Server-side handler: consumes AgentUpdate from channel, persists to DB and broadcasts to frontend.
///
/// Runs until every `mpsc::Sender<AgentUpdate>` is dropped. Both persistence
/// and broadcasting are deliberately best-effort (`let _ = ...`): a DB error
/// or an empty broadcast channel must never kill the update pump.
///
/// All arms destructure `update` by value (we own it), bind string fields by
/// reference for the SQL query, and then *move* them into the outgoing
/// `WsMessage` — avoiding the per-update `.clone()`s a `ref`-pattern match
/// would force.
pub async fn handle_agent_updates(
    mut rx: mpsc::Receiver<AgentUpdate>,
    pool: SqlitePool,
    broadcast_tx: broadcast::Sender<WsMessage>,
) {
    while let Some(update) = rx.recv().await {
        match update {
            // Broadcast-only: the plan itself is not persisted here.
            AgentUpdate::PlanUpdate { workflow_id, steps } => {
                let _ = broadcast_tx.send(WsMessage::PlanUpdate { workflow_id, steps });
            }
            AgentUpdate::WorkflowStatus { workflow_id, status } => {
                let _ = sqlx::query("UPDATE workflows SET status = ? WHERE id = ?")
                    .bind(&status)
                    .bind(&workflow_id)
                    .execute(&pool)
                    .await;
                let _ = broadcast_tx.send(WsMessage::WorkflowStatusUpdate { workflow_id, status });
            }
            // Broadcast-only, ephemeral.
            AgentUpdate::Activity { workflow_id, activity } => {
                let _ = broadcast_tx.send(WsMessage::ActivityUpdate { workflow_id, activity });
            }
            AgentUpdate::ExecutionLog { workflow_id, step_order, tool_name, tool_input, output, status } => {
                // Fresh row id; reused as the step_id in the broadcast below so
                // the frontend can correlate the two.
                let id = uuid::Uuid::new_v4().to_string();
                let _ = sqlx::query(
                    "INSERT INTO execution_log (id, workflow_id, step_order, tool_name, tool_input, output, status, created_at) VALUES (?, ?, ?, ?, ?, ?, ?, datetime('now'))"
                )
                .bind(&id)
                .bind(&workflow_id)
                .bind(step_order)
                .bind(&tool_name)
                .bind(&tool_input)
                .bind(&output)
                .bind(&status)
                .execute(&pool)
                .await;
                let _ = broadcast_tx.send(WsMessage::StepStatusUpdate {
                    step_id: id,
                    status,
                    output,
                });
            }
            AgentUpdate::LlmCallLog { workflow_id, step_order, phase, messages_count, tools_count, tool_calls, text_response, prompt_tokens, completion_tokens, latency_ms } => {
                let id = uuid::Uuid::new_v4().to_string();
                let _ = sqlx::query(
                    "INSERT INTO llm_call_log (id, workflow_id, step_order, phase, messages_count, tools_count, tool_calls, text_response, prompt_tokens, completion_tokens, latency_ms, created_at) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, datetime('now'))"
                )
                .bind(&id)
                .bind(&workflow_id)
                .bind(step_order)
                .bind(&phase)
                .bind(messages_count)
                .bind(tools_count)
                .bind(&tool_calls)
                .bind(&text_response)
                .bind(prompt_tokens.map(|v| v as i32))
                .bind(completion_tokens.map(|v| v as i32))
                // NOTE(review): latency is narrowed i64 -> i32 to match the
                // LlmCallLogEntry schema; wraps past ~24.8 days — assumed fine.
                .bind(latency_ms as i32)
                .execute(&pool)
                .await;
                // created_at is left empty: the DB fills it and the frontend
                // does not need it for a live entry.
                let entry = crate::db::LlmCallLogEntry {
                    id,
                    workflow_id: workflow_id.clone(),
                    step_order,
                    phase,
                    messages_count,
                    tools_count,
                    tool_calls,
                    text_response,
                    prompt_tokens: prompt_tokens.map(|v| v as i32),
                    completion_tokens: completion_tokens.map(|v| v as i32),
                    latency_ms: latency_ms as i32,
                    created_at: String::new(),
                };
                let _ = broadcast_tx.send(WsMessage::LlmCallLog { workflow_id, entry });
            }
            // Persist-only: snapshots are fetched on demand, not pushed.
            AgentUpdate::StateSnapshot { workflow_id, step_order, state } => {
                let id = uuid::Uuid::new_v4().to_string();
                let json = serde_json::to_string(&state).unwrap_or_default();
                let _ = sqlx::query(
                    "INSERT INTO agent_state_snapshots (id, workflow_id, step_order, state_json, created_at) VALUES (?, ?, ?, ?, datetime('now'))"
                )
                .bind(&id)
                .bind(&workflow_id)
                .bind(step_order)
                .bind(&json)
                .execute(&pool)
                .await;
            }
            AgentUpdate::WorkflowComplete { workflow_id, status, report } => {
                let _ = sqlx::query("UPDATE workflows SET status = ? WHERE id = ?")
                    .bind(&status)
                    .bind(&workflow_id)
                    .execute(&pool)
                    .await;
                // ReportReady is only announced when a report was actually
                // produced and stored.
                if let Some(report) = report {
                    let _ = sqlx::query("UPDATE workflows SET report = ? WHERE id = ?")
                        .bind(&report)
                        .bind(&workflow_id)
                        .execute(&pool)
                        .await;
                    let _ = broadcast_tx.send(WsMessage::ReportReady {
                        workflow_id: workflow_id.clone(),
                    });
                }
                let _ = broadcast_tx.send(WsMessage::WorkflowStatusUpdate { workflow_id, status });
            }
            // Persist-only: artifacts are listed via the REST API.
            AgentUpdate::ArtifactSave { workflow_id, step_order, artifact } => {
                let id = uuid::Uuid::new_v4().to_string();
                let _ = sqlx::query(
                    "INSERT INTO step_artifacts (id, workflow_id, step_order, name, path, artifact_type, description) VALUES (?, ?, ?, ?, ?, ?, ?)"
                )
                .bind(&id)
                .bind(&workflow_id)
                .bind(step_order)
                .bind(&artifact.name)
                .bind(&artifact.path)
                .bind(&artifact.artifact_type)
                .bind(&artifact.description)
                .execute(&pool)
                .await;
            }
            AgentUpdate::RequirementUpdate { workflow_id, requirement } => {
                let _ = sqlx::query("UPDATE workflows SET requirement = ? WHERE id = ?")
                    .bind(&requirement)
                    .bind(&workflow_id)
                    .execute(&pool)
                    .await;
                let _ = broadcast_tx.send(WsMessage::RequirementUpdate {
                    workflow_id,
                    requirement,
                });
            }
            AgentUpdate::Error { message } => {
                let _ = broadcast_tx.send(WsMessage::Error { message });
            }
        }
    }
}