refactor: server no longer runs agent loop or LLM

- Remove agent_loop from server (was ~400 lines) — server dispatches to workers
- AgentManager simplified to pure dispatcher (send_event → worker)
- Remove LLM config requirement from server (workers bring their own via config.yaml)
- Remove process_feedback, build_feedback_tools from server
- Remove chat API endpoint (LLM on workers only)
- Remove service proxy (services run on workers)
- Worker reads LLM config from its own config.yaml
- ws_worker.rs handles WorkerToServer::Update messages (DB + broadcast)
- Verified locally: tori server + tori worker connect and register
This commit is contained in:
2026-04-06 13:18:21 +01:00
parent dfedb6dd45
commit decabc0e8a
9 changed files with 380 additions and 997 deletions

View File

@@ -1,53 +1,20 @@
use std::sync::Arc;
use axum::{
extract::State,
http::StatusCode,
response::{IntoResponse, Response},
routing::post,
Json, Router,
};
use serde::Deserialize;
use crate::llm::{ChatMessage, LlmClient};
use crate::AppState;
/// JSON request body for the (now removed) `/chat` endpoint:
/// `{ "messages": [ { "role": ..., "content": ... }, ... ] }`.
#[derive(Deserialize)]
struct ChatRequest {
    messages: Vec<SimpleChatMessage>,
}
/// Wire-format chat message: a bare role/content pair, with none of the
/// tool-call fields carried by the internal `ChatMessage` type.
#[derive(Deserialize)]
struct SimpleChatMessage {
    // e.g. "user" / "assistant" — passed through verbatim; not validated here.
    role: String,
    content: String,
}
/// Builds the chat sub-router. The single `POST /chat` route is kept so that
/// old clients get an explicit error instead of a bare 404.
pub fn router(state: Arc<AppState>) -> Router {
    let routes = Router::new().route("/chat", post(chat));
    routes.with_state(state)
}
// Pre-refactor handler (removed in this commit): ran the LLM on the server.
// Flow: deserialize request -> adapt messages -> call LLM -> wrap reply.
async fn chat(
State(state): State<Arc<AppState>>,
Json(input): Json<ChatRequest>,
) -> Result<Json<serde_json::Value>, Response> {
// Client is built per-request from server config — no connection reuse.
let llm = LlmClient::new(&state.config.llm);
// Widen the wire-format messages into the internal `ChatMessage` shape;
// tool-call fields are always absent for plain chat requests.
let messages: Vec<ChatMessage> = input
.messages
.into_iter()
.map(|m| ChatMessage {
role: m.role,
content: Some(m.content),
tool_calls: None,
tool_call_id: None,
})
.collect();
// Any LLM failure is logged and surfaced to the caller as a 500.
let reply = llm.chat(messages).await.map_err(|e| {
tracing::error!("Chat LLM error: {}", e);
(StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response()
})?;
// Success: `{ "reply": <string> }`.
Ok(Json(serde_json::json!({ "reply": reply })))
/// Stub for the retired `/chat` endpoint.
///
/// LLM inference now runs exclusively on workers, so every request is
/// answered with `410 Gone` rather than silently 404ing for old clients.
async fn chat() -> Result<Json<serde_json::Value>, Response> {
    let gone = (StatusCode::GONE, "Chat endpoint removed. LLM runs on workers.").into_response();
    Err(gone)
}