feat: multi-branch template scanning from git repo + manual template selection

- Rewrite template.rs to scan all remote branches via git commands
  (git fetch/branch -r/ls-tree/git show/git archive)
- Add manual template picker dropdown in CreateForm UI
- Remove sentence-transformers/embed.py from Dockerfile (separate container)
- Clean up Gitea API approach, use local git repo instead
- Add chat panel and sidebar layout improvements
This commit is contained in:
Fam Zheng
2026-03-07 16:24:56 +00:00
parent cb81d7eb41
commit 07f1f285b6
14 changed files with 1030 additions and 321 deletions

53
src/api/chat.rs Normal file
View File

@@ -0,0 +1,53 @@
use std::sync::Arc;
use axum::{
extract::State,
http::StatusCode,
response::{IntoResponse, Response},
routing::post,
Json, Router,
};
use serde::Deserialize;
use crate::llm::{ChatMessage, LlmClient};
use crate::AppState;
/// Request body for `POST /chat`: the full conversation history the
/// client wants answered. No streaming or tool-call fields are accepted
/// here — this is the simplified, UI-facing chat payload.
#[derive(Deserialize)]
struct ChatRequest {
    // Ordered oldest-to-newest; forwarded to the LLM as-is after conversion.
    messages: Vec<SimpleChatMessage>,
}
/// One conversation turn as sent over the wire. A reduced form of the
/// internal `llm::ChatMessage` — callers cannot supply tool calls or
/// tool-call ids; those are filled with `None` during conversion.
#[derive(Deserialize)]
struct SimpleChatMessage {
    // Expected values follow the OpenAI-style convention ("system",
    // "user", "assistant") — not validated here; passed through verbatim.
    role: String,
    content: String,
}
/// Builds the chat API sub-router, exposing a single `POST /chat`
/// endpoint backed by the shared application state.
pub fn router(state: Arc<AppState>) -> Router {
    let routes = Router::new().route("/chat", post(chat));
    routes.with_state(state)
}
/// Handler for `POST /chat`.
///
/// Converts the incoming simplified messages into the LLM client's
/// message type (no tool-call metadata), forwards the conversation to
/// the configured LLM, and returns `{ "reply": ... }` as JSON.
///
/// On an LLM failure the error is logged and a 500 response carrying
/// the error text is returned.
async fn chat(
    State(state): State<Arc<AppState>>,
    Json(input): Json<ChatRequest>,
) -> Result<Json<serde_json::Value>, Response> {
    // Expand each wire-format message into the richer internal type,
    // leaving the tool-call fields unset.
    let mut messages: Vec<ChatMessage> = Vec::with_capacity(input.messages.len());
    for m in input.messages {
        messages.push(ChatMessage {
            role: m.role,
            content: Some(m.content),
            tool_calls: None,
            tool_call_id: None,
        });
    }

    // A fresh client per request; configuration comes from shared state.
    let llm = LlmClient::new(&state.config.llm);
    match llm.chat(messages).await {
        Ok(reply) => Ok(Json(serde_json::json!({ "reply": reply }))),
        Err(e) => {
            tracing::error!("Chat LLM error: {}", e);
            Err((StatusCode::INTERNAL_SERVER_ERROR, e.to_string()).into_response())
        }
    }
}

View File

@@ -1,3 +1,4 @@
mod chat;
mod kb;
pub mod obj;
mod projects;
@@ -31,6 +32,7 @@ pub fn router(state: Arc<AppState>) -> Router {
.merge(timers::router(state.clone()))
.merge(kb::router(state.clone()))
.merge(settings::router(state.clone()))
.merge(chat::router(state.clone()))
.route("/projects/{id}/files/{*path}", get(serve_project_file))
.route("/projects/{id}/app/{*path}", any(proxy_to_service).with_state(state.clone()))
.route("/projects/{id}/app/", any(proxy_to_service_root).with_state(state))

View File

@@ -11,6 +11,7 @@ use crate::AppState;
use crate::agent::{AgentEvent, PlanStepInfo};
use crate::db::{Workflow, ExecutionLogEntry, Comment, LlmCallLogEntry};
use crate::state::AgentState;
use crate::template;
use super::{ApiResult, db_err};
#[derive(serde::Serialize)]
@@ -21,6 +22,8 @@ struct ReportResponse {
/// Request body for creating a workflow.
#[derive(Deserialize)]
pub struct CreateWorkflow {
    // Free-form user requirement text that seeds the agent run.
    pub requirement: String,
    // Optional template to scaffold the project from; `#[serde(default)]`
    // keeps older clients (which omit the field) working — it deserializes
    // to `None` and is forwarded untouched to the agent event.
    #[serde(default)]
    pub template_id: Option<String>,
}
#[derive(Deserialize)]
@@ -36,6 +39,7 @@ pub fn router(state: Arc<AppState>) -> Router {
.route("/workflows/{id}/report", get(get_report))
.route("/workflows/{id}/plan", get(get_plan))
.route("/workflows/{id}/llm-calls", get(list_llm_calls))
.route("/templates", get(list_templates))
.with_state(state)
}
@@ -72,6 +76,7 @@ async fn create_workflow(
state.agent_mgr.send_event(&project_id, AgentEvent::NewRequirement {
workflow_id: workflow.id.clone(),
requirement: workflow.requirement.clone(),
template_id: input.template_id,
}).await;
Ok(Json(workflow))
@@ -191,3 +196,7 @@ async fn list_llm_calls(
.map(Json)
.map_err(db_err)
}
/// Handler for `GET /templates`: returns every template discovered by
/// the template scanner as a JSON array.
async fn list_templates() -> Json<Vec<template::TemplateListItem>> {
    let templates = template::list_all_templates().await;
    Json(templates)
}