refactor: split main.rs into 7 modules, add life loop with timer system
Structure:
- main.rs (534) — entry, handler, prompt building
- config.rs (52) — config structs
- state.rs (358) — AppState, SQLite, persistence
- tools.rs (665) — tool definitions, execution, subagent management
- stream.rs (776) — OpenAI/Claude streaming, system prompt
- display.rs (220) — markdown rendering, message formatting
- life.rs (87) — life loop heartbeat, timer firing

New features:
- Life Loop: background tokio task, 30s heartbeat, scans timers table
- Timer tools: set_timer (relative/absolute/cron), list_timers, cancel_timer
- inner_state table for life loop's own context
- cron crate for recurring schedule parsing

Zero logic changes in the refactor — pure structural split.
This commit is contained in:
87
src/life.rs
Normal file
87
src/life.rs
Normal file
@@ -0,0 +1,87 @@
|
||||
use std::sync::Arc;
|
||||
|
||||
use teloxide::prelude::*;
|
||||
use tracing::{error, info};
|
||||
|
||||
use crate::config::{BackendConfig, Config};
|
||||
use crate::state::AppState;
|
||||
use crate::stream::run_openai_streaming;
|
||||
use crate::tools::compute_next_cron_fire;
|
||||
|
||||
pub async fn life_loop(bot: Bot, state: Arc<AppState>, config: Arc<Config>) {
|
||||
info!("life loop started");
|
||||
let mut interval = tokio::time::interval(std::time::Duration::from_secs(30));
|
||||
|
||||
loop {
|
||||
interval.tick().await;
|
||||
|
||||
let due = state.due_timers().await;
|
||||
if due.is_empty() {
|
||||
continue;
|
||||
}
|
||||
|
||||
for (timer_id, chat_id_raw, label, schedule) in &due {
|
||||
let chat_id = ChatId(*chat_id_raw);
|
||||
info!(timer_id, %label, "timer fired");
|
||||
|
||||
// build life loop context
|
||||
let persona = state.get_config("persona").await.unwrap_or_default();
|
||||
let inner = state.get_inner_state().await;
|
||||
let now = chrono::Local::now().format("%Y-%m-%d %H:%M:%S").to_string();
|
||||
|
||||
let mut system_text = if persona.is_empty() {
|
||||
"你叫小乖,是Fam的AI伙伴。".to_string()
|
||||
} else {
|
||||
persona.clone()
|
||||
};
|
||||
system_text.push_str(&format!(
|
||||
"\n\n[当前时间] {now}\n\n[你的内心状态]\n{}",
|
||||
if inner.is_empty() { "(空)" } else { &inner }
|
||||
));
|
||||
system_text.push_str(
|
||||
"\n\n你可以使用工具来完成任务。你可以选择发消息给用户,也可以选择什么都不做(直接回复空文本)。\
|
||||
可以用 update_inner_state 更新你的内心状态。\
|
||||
输出格式:纯文本或基础Markdown,不要LaTeX或特殊Unicode。",
|
||||
);
|
||||
|
||||
let messages = vec![
|
||||
serde_json::json!({"role": "system", "content": system_text}),
|
||||
serde_json::json!({"role": "user", "content": format!("[timer] {label}")}),
|
||||
];
|
||||
|
||||
// call LLM (no tools for now — keep life loop simple)
|
||||
if let BackendConfig::OpenAI {
|
||||
ref endpoint,
|
||||
ref model,
|
||||
ref api_key,
|
||||
} = config.backend
|
||||
{
|
||||
match run_openai_streaming(endpoint, model, api_key, &messages, &bot, chat_id)
|
||||
.await
|
||||
{
|
||||
Ok(response) => {
|
||||
if !response.is_empty() {
|
||||
info!(timer_id, "life loop sent response ({} chars)", response.len());
|
||||
}
|
||||
}
|
||||
Err(e) => {
|
||||
error!(timer_id, "life loop LLM error: {e:#}");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// reschedule or delete
|
||||
if schedule.starts_with("cron:") {
|
||||
if let Some(next) = compute_next_cron_fire(schedule) {
|
||||
state.update_timer_next_fire(*timer_id, &next).await;
|
||||
info!(timer_id, next = %next, "cron rescheduled");
|
||||
} else {
|
||||
state.cancel_timer(*timer_id).await;
|
||||
}
|
||||
} else {
|
||||
// one-shot: delete after firing
|
||||
state.cancel_timer(*timer_id).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user