add update_inner_state tool, life loop with tools, timeout protection

- update_inner_state: LLM can update its own persistent inner state
- inner_state injected into chat loop system prompt (read-only)
- life loop now uses run_openai_with_tools (full tool access)
- life loop LLM calls wrapped in 120s tokio::time::timeout
- All reqwest clients: 120s timeout (whisper: 60s)
- doc/life.md: life loop architecture design doc
- todo.md: removed completed items
This commit is contained in:
Fam Zheng
2026-04-09 21:06:43 +01:00
parent c3eb13dad3
commit 0b42f22f0f
7 changed files with 159 additions and 55 deletions

View File

@@ -1,13 +1,15 @@
use std::sync::Arc;
use teloxide::prelude::*;
use tracing::{error, info};
use tracing::{error, info, warn};
use crate::config::{BackendConfig, Config};
use crate::state::AppState;
use crate::stream::run_openai_streaming;
use crate::stream::run_openai_with_tools;
use crate::tools::compute_next_cron_fire;
/// Upper bound (seconds) on a single life-loop LLM call; the call is wrapped
/// in `tokio::time::timeout` with this duration so a hung backend cannot
/// stall the loop indefinitely.
const LIFE_LOOP_TIMEOUT_SECS: u64 = 120;
pub async fn life_loop(bot: Bot, state: Arc<AppState>, config: Arc<Config>) {
info!("life loop started");
let mut interval = tokio::time::interval(std::time::Duration::from_secs(30));
@@ -39,8 +41,7 @@ pub async fn life_loop(bot: Bot, state: Arc<AppState>, config: Arc<Config>) {
if inner.is_empty() { "(空)" } else { &inner }
));
system_text.push_str(
"\n\n你可以使用工具来完成任务。可以选择发消息给用户,也可以选择什么都不做(直接回复空文本)\
可以用 update_inner_state 更新你的内心状态。\
"\n\n你可以使用工具来完成任务。可以用 update_inner_state 更新你的内心状态\
输出格式纯文本或基础Markdown不要LaTeX或特殊Unicode。",
);
@@ -49,24 +50,36 @@ pub async fn life_loop(bot: Bot, state: Arc<AppState>, config: Arc<Config>) {
serde_json::json!({"role": "user", "content": format!("[timer] {label}")}),
];
// call LLM (no tools for now — keep life loop simple)
if let BackendConfig::OpenAI {
ref endpoint,
ref model,
ref api_key,
} = config.backend
{
match run_openai_streaming(endpoint, model, api_key, &messages, &bot, chat_id)
.await
{
Ok(response) => {
// synthetic session id for life loop (not tied to any real chat session)
let sid = format!("life-{chat_id_raw}");
let result = tokio::time::timeout(
std::time::Duration::from_secs(LIFE_LOOP_TIMEOUT_SECS),
run_openai_with_tools(
endpoint, model, api_key, messages, &bot, chat_id, &state, &sid,
&config, true,
),
)
.await;
match result {
Ok(Ok(response)) => {
if !response.is_empty() {
info!(timer_id, "life loop sent response ({} chars)", response.len());
info!(timer_id, "life loop response ({} chars)", response.len());
}
}
Err(e) => {
Ok(Err(e)) => {
error!(timer_id, "life loop LLM error: {e:#}");
}
Err(_) => {
warn!(timer_id, "life loop LLM timeout after {LIFE_LOOP_TIMEOUT_SECS}s");
}
}
}
@@ -79,7 +92,6 @@ pub async fn life_loop(bot: Bot, state: Arc<AppState>, config: Arc<Config>) {
state.cancel_timer(*timer_id).await;
}
} else {
// one-shot: delete after firing
state.cancel_timer(*timer_id).await;
}
}