refactor: worker mode — server offloads all LLM/exec to worker

- Split into `tori server` / `tori worker` subcommands (clap derive)
- Extract lib.rs for shared crate (agent, llm, exec, state, etc.)
- Introduce AgentUpdate channel to decouple agent loop from DB/broadcast
- New sink.rs: AgentUpdate enum + ServiceManager + handle_agent_updates
- New worker_runner.rs: connects to server WS, runs full agent loop
- Expand worker protocol: ServerToWorker (workflow_assign, comment)
  and WorkerToServer (register, result, update)
- Remove LLM from title generation (heuristic) and template selection
  (must be explicit)
- Remove KB tools (kb_search, kb_read) and remote worker tools
  (list_workers, execute_on_worker) from agent loop
- run_agent_loop/run_step_loop now take mpsc::Sender<AgentUpdate>
  instead of direct DB pool + broadcast sender
This commit is contained in:
2026-04-06 12:54:57 +01:00
parent 28a00dd2f3
commit e4ba385112
9 changed files with 1003 additions and 610 deletions

View File

@@ -3,7 +3,6 @@ use std::path::{Path, PathBuf};
use serde::Deserialize;
use crate::TemplateRepoConfig;
use crate::llm::{ChatMessage, LlmClient};
use crate::tools::ExternalToolManager;
#[derive(Debug, Deserialize)]
@@ -463,42 +462,6 @@ pub fn is_repo_template(template_id: &str) -> bool {
// A '/' in the id marks a repo-hosted template (presumably an
// "owner/repo"-style slug — confirm against the template repo config).
template_id.contains('/')
}
// --- LLM template selection ---
/// Asks the LLM to choose the template that best matches `requirement`.
///
/// Gathers every available template (local plus the optional repo source in
/// `repo_cfg`), sends the catalog and the user requirement to `llm`, and
/// interprets the trimmed, lowercased reply as a template id.
///
/// Returns `None` when there are no templates, the LLM call fails, the model
/// answers "none", or the answer matches no known template id.
pub async fn select_template(llm: &LlmClient, requirement: &str, repo_cfg: Option<&TemplateRepoConfig>) -> Option<String> {
    let templates = list_all_templates(repo_cfg).await;
    if templates.is_empty() {
        return None;
    }

    // One "- id / 名称 / 描述" entry per template, newline-separated.
    let mut catalog = String::new();
    for (i, t) in templates.iter().enumerate() {
        if i > 0 {
            catalog.push('\n');
        }
        catalog.push_str(&format!("- id: {}\n 名称: {}\n 描述: {}", t.id, t.name, t.description));
    }

    let user_prompt = format!(
        "以下是可用的项目模板:\n{}\n\n用户需求:{}\n\n选择最匹配的模板 ID如果都不合适则回复 none。只回复模板 ID 或 none不要其他内容。",
        catalog, requirement
    );

    // A failed chat call (`Err`) silently becomes `None` via `ok()?`.
    let reply = llm
        .chat(vec![
            ChatMessage::system("你是一个模板选择助手。根据用户需求选择最合适的项目模板。只回复模板 ID 或 none。"),
            ChatMessage::user(&user_prompt),
        ])
        .await
        .ok()?;

    let choice = reply.trim().to_lowercase();
    tracing::info!("Template selection LLM response: '{}' (available: {:?})",
        choice, templates.iter().map(|t| t.id.as_str()).collect::<Vec<_>>());

    if choice == "none" {
        return None;
    }

    // Accept the answer only when it is the exact id of a known template.
    for t in &templates {
        if t.id == choice {
            return Some(t.id.clone());
        }
    }
    None
}
// --- Template loading ---