Add update_inner_state tool, life loop with tools, timeout protection

- update_inner_state: LLM can update its own persistent inner state
- inner_state is injected into the chat loop system prompt (read-only)
- Life Loop now uses run_openai_with_tools (full tool access)
- Life Loop LLM calls wrapped in 120s tokio::time::timeout
- All reqwest clients: 120s timeout (whisper: 60s)
- doc/life.md: life loop architecture design doc
- todo.md: removed completed items
This commit is contained in:
Fam Zheng
2026-04-09 21:06:43 +01:00
parent c3eb13dad3
commit 0b42f22f0f
7 changed files with 159 additions and 55 deletions

View File

@@ -1,13 +1,15 @@
use std::sync::Arc;
use teloxide::prelude::*;
use tracing::{error, info};
use tracing::{error, info, warn};
use crate::config::{BackendConfig, Config};
use crate::state::AppState;
use crate::stream::run_openai_streaming;
use crate::stream::run_openai_with_tools;
use crate::tools::compute_next_cron_fire;
const LIFE_LOOP_TIMEOUT_SECS: u64 = 120;
pub async fn life_loop(bot: Bot, state: Arc<AppState>, config: Arc<Config>) {
info!("life loop started");
let mut interval = tokio::time::interval(std::time::Duration::from_secs(30));
@@ -39,8 +41,7 @@ pub async fn life_loop(bot: Bot, state: Arc<AppState>, config: Arc<Config>) {
if inner.is_empty() { "(空)" } else { &inner }
));
system_text.push_str(
"\n\n你可以使用工具来完成任务。可以选择发消息给用户,也可以选择什么都不做(直接回复空文本)\
可以用 update_inner_state 更新你的内心状态。\
"\n\n你可以使用工具来完成任务。可以用 update_inner_state 更新你的内心状态\
输出格式纯文本或基础Markdown不要LaTeX或特殊Unicode。",
);
@@ -49,24 +50,36 @@ pub async fn life_loop(bot: Bot, state: Arc<AppState>, config: Arc<Config>) {
serde_json::json!({"role": "user", "content": format!("[timer] {label}")}),
];
// call LLM (no tools for now — keep life loop simple)
if let BackendConfig::OpenAI {
ref endpoint,
ref model,
ref api_key,
} = config.backend
{
match run_openai_streaming(endpoint, model, api_key, &messages, &bot, chat_id)
.await
{
Ok(response) => {
// synthetic session id for life loop (not tied to any real chat session)
let sid = format!("life-{chat_id_raw}");
let result = tokio::time::timeout(
std::time::Duration::from_secs(LIFE_LOOP_TIMEOUT_SECS),
run_openai_with_tools(
endpoint, model, api_key, messages, &bot, chat_id, &state, &sid,
&config, true,
),
)
.await;
match result {
Ok(Ok(response)) => {
if !response.is_empty() {
info!(timer_id, "life loop sent response ({} chars)", response.len());
info!(timer_id, "life loop response ({} chars)", response.len());
}
}
Err(e) => {
Ok(Err(e)) => {
error!(timer_id, "life loop LLM error: {e:#}");
}
Err(_) => {
warn!(timer_id, "life loop LLM timeout after {LIFE_LOOP_TIMEOUT_SECS}s");
}
}
}
@@ -79,7 +92,6 @@ pub async fn life_loop(bot: Bot, state: Arc<AppState>, config: Arc<Config>) {
state.cancel_timer(*timer_id).await;
}
} else {
// one-shot: delete after firing
state.cancel_timer(*timer_id).await;
}
}

View File

@@ -403,7 +403,8 @@ async fn handle_inner(
let conv = state.load_conv(&sid).await;
let persona = state.get_config("persona").await.unwrap_or_default();
let memory_slots = state.get_memory_slots().await;
let system_msg = build_system_prompt(&conv.summary, &persona, &memory_slots);
let inner = state.get_inner_state().await;
let system_msg = build_system_prompt(&conv.summary, &persona, &memory_slots, &inner);
let mut api_messages = vec![system_msg];
api_messages.extend(conv.messages);
@@ -514,7 +515,9 @@ fn build_prompt(
}
async fn transcribe_audio(whisper_url: &str, file_path: &Path) -> Result<String> {
let client = reqwest::Client::new();
let client = reqwest::Client::builder()
.timeout(std::time::Duration::from_secs(60))
.build()?;
let url = format!("{}/v1/audio/transcriptions", whisper_url.trim_end_matches('/'));
let file_bytes = tokio::fs::read(file_path).await?;
let file_name = file_path

View File

@@ -248,7 +248,6 @@ impl AppState {
.unwrap_or_default()
}
#[allow(dead_code)] // used by life loop tools (coming soon)
pub async fn set_inner_state(&self, content: &str) {
let db = self.db.lock().await;
let _ = db.execute(

View File

@@ -120,7 +120,10 @@ pub async fn run_openai_with_tools(
config: &Arc<Config>,
is_private: bool,
) -> Result<String> {
let client = reqwest::Client::new();
let client = reqwest::Client::builder()
.timeout(std::time::Duration::from_secs(120))
.build()
.unwrap();
let url = format!("{}/chat/completions", endpoint.trim_end_matches('/'));
let tools = discover_tools();
@@ -570,7 +573,10 @@ pub async fn run_openai_streaming(
bot: &Bot,
chat_id: ChatId,
) -> Result<String> {
let client = reqwest::Client::new();
let client = reqwest::Client::builder()
.timeout(std::time::Duration::from_secs(120))
.build()
.unwrap();
let url = format!("{}/chat/completions", endpoint.trim_end_matches('/'));
let body = serde_json::json!({
@@ -685,7 +691,7 @@ pub async fn run_openai_streaming(
Ok(accumulated)
}
pub fn build_system_prompt(summary: &str, persona: &str, memory_slots: &[(i32, String)]) -> serde_json::Value {
pub fn build_system_prompt(summary: &str, persona: &str, memory_slots: &[(i32, String)], inner_state: &str) -> serde_json::Value {
let mut text = if persona.is_empty() {
String::from("你是一个AI助手。")
} else {
@@ -708,6 +714,11 @@ pub fn build_system_prompt(summary: &str, persona: &str, memory_slots: &[(i32, S
}
}
if !inner_state.is_empty() {
text.push_str("\n\n## 你的内在状态\n");
text.push_str(inner_state);
}
if !summary.is_empty() {
text.push_str("\n\n## 之前的对话总结\n");
text.push_str(summary);
@@ -747,7 +758,10 @@ pub async fn summarize_messages(
)
};
let client = reqwest::Client::new();
let client = reqwest::Client::builder()
.timeout(std::time::Duration::from_secs(120))
.build()
.unwrap();
let url = format!("{}/chat/completions", endpoint.trim_end_matches('/'));
let body = serde_json::json!({

View File

@@ -103,6 +103,20 @@ pub fn discover_tools() -> serde_json::Value {
}
}
}),
serde_json::json!({
"type": "function",
"function": {
"name": "update_inner_state",
"description": "更新你的内在状态。这是你自己的持续意识跨会话保留Life Loop 和对话都能看到。记录你对当前情况的理解、正在跟踪的事、对 Fam 状态的感知等。",
"parameters": {
"type": "object",
"properties": {
"content": {"type": "string", "description": "完整的内在状态文本(替换之前的)"}
},
"required": ["content"]
}
}
}),
serde_json::json!({
"type": "function",
"function": {
@@ -275,6 +289,11 @@ pub async fn execute_tool(
Err(e) => format!("Failed to send file: {e:#}"),
}
}
"update_inner_state" => {
let content = args["content"].as_str().unwrap_or("");
state.set_inner_state(content).await;
format!("Inner state updated ({} chars)", content.len())
}
"update_scratch" => {
let content = args["content"].as_str().unwrap_or("");
state.push_scratch(content).await;
@@ -480,7 +499,8 @@ pub async fn agent_wakeup(
let conv = state.load_conv(sid).await;
let persona = state.get_config("persona").await.unwrap_or_default();
let memory_slots = state.get_memory_slots().await;
let system_msg = build_system_prompt(&conv.summary, &persona, &memory_slots);
let inner = state.get_inner_state().await;
let system_msg = build_system_prompt(&conv.summary, &persona, &memory_slots, &inner);
let mut api_messages = vec![system_msg];
api_messages.extend(conv.messages);